A dataset-viewer dump of a commit-pair corpus: each row pairs a file's contents before and after a commit with the commit metadata. The viewer's column statistics, reconstructed from the flattened header:

| column | type | lengths / distinct values |
|---|---|---|
| commit | string | 40–40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |

The last six columns are derived: `prompt` is `old_contents` followed by the commit `message`, `response` equals `new_contents`, the `*_tagged` variants wrap the same pieces in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, and `text`/`text_tagged` concatenate prompt and response. Because they repeat the primary fields verbatim, the sample rows below list only the primary fields.
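
Assuming this dump comes from a dataset hosted on the Hugging Face Hub (the viewer layout suggests it does), a row can be inspected with the `datasets` library. This is a minimal sketch; the repository id `user/commit-pairs` is a hypothetical placeholder, since the dump does not name the dataset:

```python
# Minimal inspection sketch. "user/commit-pairs" is a hypothetical
# placeholder -- the real repository id is not stated in this dump.
from datasets import load_dataset

ds = load_dataset("user/commit-pairs", split="train")
row = ds[0]

print(row["commit"], row["old_file"])  # 40-char SHA and file path
print(row["subject"])                  # short commit subject
# The *_tagged columns wrap the same content in special markers:
#   <commit_before>{old_contents}<commit_msg>{message}<commit_after>{new_contents}
print(row["prompt_tagged"][:120])
```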

---

- **commit:** `ab5d343a91374119b1983e93a57fd47ab09ebc63`
- **old_file / new_file:** `plinyproj/__version__.py`
- **subject / message:** Fix version conflict and bump to 0.6
- **lang:** Python
- **license:** mit
- **repos:** bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject

**old_contents:**

```python
<<<<<<< HEAD
__version_info__ = (0, 6, 0)
=======
__version_info__ = (0, 5, 7)
>>>>>>> hotfix/0.5.7
__version__ = '.'.join([str(i) for i in __version_info__])
```

**new_contents:**

```python
__version_info__ = (0, 6, 0)
__version__ = '.'.join([str(i) for i in __version_info__])
```

---

- **commit:** `b2ffa1616c1ca31916047e1524e73395f0f45936`
- **old_file / new_file:** `bokeh/charts/tests/test_stats.py`
- **subject / message:** Add auto binning test to stats.
- **lang:** Python
- **license:** bsd-3-clause

**old_contents:**

```python
import pytest

from bokeh.charts.stats import Bins
from bokeh.models import ColumnDataSource

@pytest.fixture
def ds(test_data):
    return ColumnDataSource(test_data.auto_data)

def test_explicit_bin_count(ds):
    b = Bins(source=ds, column='mpg', bin_count=2)
    assert len(b.bins) == 2

def test_auto_bin_count(ds):
    b = Bins(source=ds, column='mpg', bin_count=2)
    assert len(b.bins) == 2
```

**new_contents:**

```python
import pytest

from bokeh.charts.stats import Bins
from bokeh.models import ColumnDataSource

@pytest.fixture
def ds(test_data):
    return ColumnDataSource(test_data.auto_data)

def test_explicit_bin_count(ds):
    b = Bins(source=ds, column='mpg', bin_count=2)
    assert len(b.bins) == 2

def test_auto_bin_count(ds):
    b = Bins(source=ds, column='mpg')
    assert len(b.bins) == 12
```

**repos:**
percyfal/bokeh,msarahan/bokeh,schoolie/bokeh,mindriot101/bokeh,Karel-van-de-Plassche/bokeh,timsnyder/bokeh,dennisobrien/bokeh,aiguofer/bokeh,phobson/bokeh,ericmjl/bokeh,phobson/bokeh,draperjames/bokeh,timsnyder/bokeh,stonebig/bokeh,timsnyder/bokeh,stonebig/bokeh,ptitjano/bokeh,maxalbert/bokeh,jakirkham/bokeh,DuCorey/bokeh,draperjames/bokeh,bokeh/bokeh,draperjames/bokeh,schoolie/bokeh,DuCorey/bokeh,schoolie/bokeh,philippjfr/bokeh,DuCorey/bokeh,percyfal/bokeh,philippjfr/bokeh,maxalbert/bokeh,msarahan/bokeh,jakirkham/bokeh,htygithub/bokeh,dennisobrien/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,aiguofer/bokeh,schoolie/bokeh,schoolie/bokeh,dennisobrien/bokeh,DuCorey/bokeh,quasiben/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,timsnyder/bokeh,azjps/bokeh,ericmjl/bokeh,bokeh/bokeh,aiguofer/bokeh,bokeh/bokeh,mindriot101/bokeh,htygithub/bokeh,maxalbert/bokeh,quasiben/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,justacec/bokeh,htygithub/bokeh,clairetang6/bokeh,azjps/bokeh,ptitjano/bokeh,justacec/bokeh,aavanian/bokeh,ptitjano/bokeh,ptitjano/bokeh,aiguofer/bokeh,ptitjano/bokeh,bokeh/bokeh,dennisobrien/bokeh,percyfal/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,justacec/bokeh,phobson/bokeh,rs2/bokeh,philippjfr/bokeh,maxalbert/bokeh,stonebig/bokeh,rs2/bokeh,azjps/bokeh,phobson/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,msarahan/bokeh,philippjfr/bokeh,aavanian/bokeh,htygithub/bokeh,azjps/bokeh,draperjames/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,rs2/bokeh,azjps/bokeh,mindriot101/bokeh,justacec/bokeh,percyfal/bokeh,clairetang6/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,percyfal/bokeh,stonebig/bokeh,msarahan/bokeh,ericmjl/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,clairetang6/bokeh,rs2/bokeh,aavanian/bokeh,clairetang6/bokeh,mindriot101/bokeh,ericmjl/bokeh,quasiben/bokeh,phobson/bokeh,philippjfr/bokeh,timsnyder/bokeh,aiguofer/bokeh,Karel-van-de-Plassche/bokeh

---

- **commit:** `688dfc501e336b29db2fd3a7dc7bda6a8446c5e9`
- **old_file / new_file:** `main.py`
- **subject / message:** Add log level setting to mailer function
- **lang:** Python
- **license:** mit
- **repos:** vv-p/jira-reports,vv-p/jira-reports

**old_contents:**

```python
# -*- coding: utf-8 -*-
import smtplib
import arrow
from email.mime.text import MIMEText

def send_on_email(report, subject, mail_from, mail_to, smtp_auth):
    smtp_login, smtp_password = smtp_auth
    msg = MIMEText(report.encode('utf-8'), 'html', 'utf-8')
    msg["Subject"] = subject
    msg["From"] = mail_from
    to = ', '.join(mail_to.split())
    msg["To"] = to
    s = smtplib.SMTP_SSL('smtp.mail.ru', 465)
    # s.set_debuglevel(2)  # ToDo: parametrize it
    s.login(smtp_login, smtp_password)
    s.sendmail(smtp_login, mail_to, msg.as_string())  # sender address must match authenticated user

def get_day_offset():
    now = arrow.now()
    offsets = (3, 1, 1, 1, 1, 1, 2)
    return offsets[now.weekday()]

def get_jira_list(l):
    """
    Format any lists to jira string
    :param l: list to format ["aaa", "bbb", "ccc"]
    :return: string looks like '"aaa", "bbb", "ccc"'
    """
    return ', '.join('"%s"' % x for x in l)
```

**new_contents:**

```python
# -*- coding: utf-8 -*-
import smtplib
import arrow
from email.mime.text import MIMEText
from logging import INFO as LOGGING_INFO, DEBUG as LOGGING_DEBUG

def send_on_email(report, subject, mail_from, mail_to, smtp_auth, log_level=LOGGING_INFO):
    smtp_login, smtp_password = smtp_auth
    msg = MIMEText(report.encode('utf-8'), 'html', 'utf-8')
    msg["Subject"] = subject
    msg["From"] = mail_from
    to = ', '.join(mail_to.split())
    msg["To"] = to
    s = smtplib.SMTP_SSL('smtp.mail.ru', 465)
    if log_level == LOGGING_DEBUG:
        s.set_debuglevel(1)
    s.login(smtp_login, smtp_password)
    s.sendmail(smtp_login, mail_to, msg.as_string())  # sender address must match authenticated user

def get_day_offset():
    """
    Get previous working day offset
    :return:
    """
    now = arrow.now()
    offsets = (3, 1, 1, 1, 1, 1, 2)
    return offsets[now.weekday()]

def get_jira_list(l):
    """
    Format any lists to jira string
    :param l: list to format ["aaa", "bbb", "ccc"]
    :return: string looks like '"aaa", "bbb", "ccc"'
    """
    return ', '.join('"%s"' % x for x in l)
```

---

- **commit:** `ea7b84d4685a48e13bd58cfd52d14fff4ed7001a`
- **old_file / new_file:** `main.py`
- **subject / message:** Add log to self play
- **lang:** Python
- **license:** mit
- **repos:** misterwilliam/connect-four

**old_contents:**

```python
from game import Game
from self_play import SelfPlay

g = Game()
runtime = SelfPlay(g)
runtime.play()
```

**new_contents:**

```python
from game import Game
from self_play import SelfPlay
import game_stats_tree

g = Game()
runtime = SelfPlay(g)
runtime.play()

game_stats_tree = game_stats_tree.Node()
update_game_stats(game_stats_tree, runtime.log)
```

---

- **commit:** `df4967b5e71e32f70e97d52a320d9b32d70095b7`
- **old_file / new_file:** `main.py`
- **subject / message:** Check if song is_playing before play
- **lang:** Python
- **license:** mit
- **repos:** kshvmdn/nowplaying

**old_contents:**

```python
#!/usr/bin/env python3
import sys

from appscript import *
from termcolor import colored, cprint

def open(itunes):
    return itunes.activate()

def close(itunes):
    return itunes.quit()

def now_playing(itunes):
    track = itunes.current_track.get()
    return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']),
                                      track.artist(),
                                      track.album()))

def play(itunes):
    itunes.play()
    return now_playing(itunes)

def stop(itunes):
    return itunes.stop()

def main():
    cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \
        app('System Events').processes[its.name == 'iTunes'].count(), \
        app('iTunes')
    if not is_open == 1:
        open(itunes)
    cmds = {
        'np': now_playing,
        'play': play,
        'show': open,
        'stop': stop,
        'close': close
    }
    cmd = cmds[cmd] if cmd in cmds else now_playing
    return cmd(itunes)

if __name__ == '__main__':
    main()
```

**new_contents:**

```python
#!/usr/bin/env python3
import sys

from appscript import *
from termcolor import colored, cprint

def open(itunes):
    return itunes.activate()

def close(itunes):
    return itunes.quit()

def is_playing(itunes):
    return itunes.player_state.get() == k.playing

def now_playing(itunes):
    if not is_playing(itunes):
        return play(itunes)
    track = itunes.current_track.get()
    return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']),
                                      track.artist(),
                                      track.album()))

def play(itunes):
    if is_playing(itunes):
        return play_next(itunes)
    itunes.play()
    return now_playing(itunes)

def stop(itunes):
    return itunes.stop()

def main():
    cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \
        app('System Events').processes[its.name == 'iTunes'].count(), \
        app('iTunes')
    if not is_open == 1:
        open(itunes)
    cmds = {
        'np': now_playing,
        'play': play,
        'show': open,
        'stop': stop,
        'close': close
    }
    cmd = cmds[cmd] if cmd in cmds else now_playing
    return cmd(itunes)

if __name__ == '__main__':
    main()
```

---

- **commit:** `fe308f4b9372a4839af4d2374500c7a421fca14b`
- **old_file / new_file:** `app/schedule/tasks.py`
- **subject:** Return the message status to the message broker
- **message:** fix: Return the message status to the message broker
- **lang:** Python
- **license:** agpl-3.0
- **repos:** agendaodonto/server,agendaodonto/server

**old_contents:**

```python
from django.conf import settings

from app.schedule.celery import celery_app
from app.schedule.libs.sms import DeviceNotFoundError

@celery_app.task(bind=True)
def send_message(self, to, message):
    messenger = settings.APP_MESSENGER_CLASS
    user = settings.SMS_GATEWAY_USER
    password = settings.SMS_GATEWAY_PASSWORD
    try:
        messenger = messenger(user, password)
        messenger.get_best_device()
        messenger.send_message(to, message)
    except DeviceNotFoundError as e:
        self.retry(exc=e, max_retries=settings.CELERY_TASK_MAX_RETRY, countdown=60 * 5)
```

**new_contents:**

```python
from django.conf import settings

from app.schedule.celery import celery_app
from app.schedule.libs.sms import DeviceNotFoundError

@celery_app.task(bind=True)
def send_message(self, to, message):
    messenger = settings.APP_MESSENGER_CLASS
    user = settings.SMS_GATEWAY_USER
    password = settings.SMS_GATEWAY_PASSWORD
    try:
        messenger = messenger(user, password)
        messenger.get_best_device()
        return messenger.send_message(to, message)
    except DeviceNotFoundError as e:
        self.retry(exc=e, max_retries=settings.CELERY_TASK_MAX_RETRY, countdown=60 * 5)
```

---

- **commit:** `8ef4417a95fdd9b5dde26583a9624181639df600`
- **old_file / new_file:** `nix/__init__.py`
- **subject:** Remove mixins from namespace after monkey patching
- **message:** Remove mixins from namespace after monkey patching. After their job is done, they can go home ;-)
- **lang:** Python
- **license:** bsd-3-clause
- **repos:** stoewer/nixpy,stoewer/nixpy

**old_contents:**

```python
# Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.

from nix.core import File, FileMode, Block, DataType, Section, Property, Value, \
    Source, DataArray, RangeDimension, SetDimension, SampledDimension, \
    DimensionType, Feature, LinkType, Tag, MultiTag
from nix.block import BlockMixin
from nix.file import FileMixin
from nix.section import SectionMixin
from nix.property import PropertyMixin, ValueMixin
from nix.source import SourceMixin
from nix.data_array import DataArrayMixin
from nix.tag import TagMixin
from nix.multi_tag import MultiTagMixin
from nix.entity_with_sources import DataArrySourcesMixin, MultiTagSourcesMixin, \
    TagSourcesMixin

__all__ = ("File", "FileMode", "Block", "DataType", "Section", "Property",
           "Value", "Source", "DataArray", "RangeDimension", "SetDimension",
           "SampledDimension", "DimensionType", "Feature", "LinkType",
           "Tag", "MultiTag")

__author__ = "Christian Kellner"
```

**new_contents:**

```python
# Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.

from nix.core import File, FileMode, Block, DataType, Section, Property, Value, \
    Source, DataArray, RangeDimension, SetDimension, SampledDimension, \
    DimensionType, Feature, LinkType, Tag, MultiTag
from nix.block import BlockMixin
from nix.file import FileMixin
from nix.section import SectionMixin
from nix.property import PropertyMixin, ValueMixin
from nix.source import SourceMixin
from nix.data_array import DataArrayMixin
from nix.tag import TagMixin
from nix.multi_tag import MultiTagMixin
from nix.entity_with_sources import DataArrySourcesMixin, MultiTagSourcesMixin, \
    TagSourcesMixin

__all__ = ("File", "FileMode", "Block", "DataType", "Section", "Property",
           "Value", "Source", "DataArray", "RangeDimension", "SetDimension",
           "SampledDimension", "DimensionType", "Feature", "LinkType",
           "Tag", "MultiTag")

del BlockMixin, FileMixin, SectionMixin, PropertyMixin, ValueMixin, SourceMixin, DataArrayMixin, TagMixin
del MultiTagMixin, DataArrySourcesMixin, MultiTagSourcesMixin, TagSourcesMixin

__author__ = 'Christian Kellner, Adrian Stoewer, Andrey Sobolev, Jan Grewe, Balint Morvai'
```

---

- **commit:** `69d6d87688d9f805689407b839c4fb88f397269e`
- **old_file / new_file:** `cla_backend/apps/status/views.py`
- **subject:** Revert "Deliberately break status check"
- **message:** Revert "Deliberately break status check". This reverts commit da7f671ec287afc0c42f58794053b8bf69ddf620.
- **lang:** Python
- **license:** mit
- **repos:** ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend

**old_contents:**

```python
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer

from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView

class JSONResponse(HttpResponse):
    def __init__(self, data, **kwargs):
        content = JSONRenderer().render(data)
        kwargs["content_type"] = "application/json"
        super(JSONResponse, self).__init__(content, **kwargs)

@csrf_exempt
def status(request):
    if request.method == "GET":
        message = ""
        c = None
        try:
            c = connection.cursor()
            c.execute("SELECT 1")
            row = c.fetchone()
            db_ready = row[0] == 1
            db_ready = False
            return JSONResponse({"db": {"ready": db_ready, "message": message}})
        except DatabaseError as e:
            message = str(e)
        finally:
            if c:
                c.close()

@csrf_exempt
def smoketests(request):
    """
    Run smoke tests and return results as JSON datastructure
    """
    from cla_backend.apps.status.tests.smoketests import SmokeTests
    return JSONResponse(smoketest(SmokeTests))

class PingJsonView(BasePingJsonView):
    CONTRACT_2018_ENABLED_key = None
```

**new_contents:**

```python
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer

from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView

class JSONResponse(HttpResponse):
    def __init__(self, data, **kwargs):
        content = JSONRenderer().render(data)
        kwargs["content_type"] = "application/json"
        super(JSONResponse, self).__init__(content, **kwargs)

@csrf_exempt
def status(request):
    if request.method == "GET":
        message = ""
        c = None
        try:
            c = connection.cursor()
            c.execute("SELECT 1")
            row = c.fetchone()
            db_ready = row[0] == 1
            return JSONResponse({"db": {"ready": db_ready, "message": message}})
        except DatabaseError as e:
            message = str(e)
        finally:
            if c:
                c.close()

@csrf_exempt
def smoketests(request):
    """
    Run smoke tests and return results as JSON datastructure
    """
    from cla_backend.apps.status.tests.smoketests import SmokeTests
    return JSONResponse(smoketest(SmokeTests))

class PingJsonView(BasePingJsonView):
    CONTRACT_2018_ENABLED_key = None
```
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
Revert "Deliberately break status check"
This reverts commit da7f671ec287afc0c42f58794053b8bf69ddf620.
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
<commit_before>from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
<commit_msg>Revert "Deliberately break status check"
This reverts commit da7f671ec287afc0c42f58794053b8bf69ddf620.<commit_after>
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON datastructure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
Revert "Deliberately break status check"
This reverts commit da7f671ec287afc0c42f58794053b8bf69ddf620.from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON data structure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
|
<commit_before>from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
db_ready = False
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON data structure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
<commit_msg>Revert "Deliberately break status check"
This reverts commit da7f671ec287afc0c42f58794053b8bf69ddf620.<commit_after>from django.db import connection, DatabaseError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.renderers import JSONRenderer
from cla_common.smoketest import smoketest
from moj_irat.views import PingJsonView as BasePingJsonView
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs["content_type"] = "application/json"
super(JSONResponse, self).__init__(content, **kwargs)
@csrf_exempt
def status(request):
if request.method == "GET":
message = ""
c = None
try:
c = connection.cursor()
c.execute("SELECT 1")
row = c.fetchone()
db_ready = row[0] == 1
return JSONResponse({"db": {"ready": db_ready, "message": message}})
except DatabaseError as e:
message = str(e)
finally:
if c:
c.close()
@csrf_exempt
def smoketests(request):
"""
Run smoke tests and return results as JSON data structure
"""
from cla_backend.apps.status.tests.smoketests import SmokeTests
return JSONResponse(smoketest(SmokeTests))
class PingJsonView(BasePingJsonView):
CONTRACT_2018_ENABLED_key = None
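
The status() view above reports database readiness by round-tripping SELECT 1 and wrapping the outcome as {"db": {"ready": ..., "message": ...}}. Below is a minimal sketch of probing such an endpoint with Django's test client, assuming a configured project and a /status/ route; neither assumption is taken from the commit.

# Minimal sketch: probing a JSON health-check view with Django's test client.
# The '/status/' URL mapping for status() is an assumption, not from the commit.
import json
from django.test import Client

def probe_status():
    response = Client().get('/status/')
    payload = json.loads(response.content)
    # 'ready' is True only when SELECT 1 came back as 1 without a DatabaseError
    return payload['db']['ready'], payload['db']['message']

print(probe_status())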
|
661c9413900d74baa308feec3232bc3c9edee361
|
repl.py
|
repl.py
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
from runtime import tokenizer, parser, ast, std
TEA_VERSION = "0.0.3-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.tree_to_string()
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = std.default_context()
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags: return
print(output)
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import tokenizer, parser, env
TEA_VERSION = "0.0.4-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.eval(context)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
Fix bugs and increment version number
|
Fix bugs and increment version number
|
Python
|
mit
|
lnsp/tea,lnsp/tea
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
from runtime import tokenizer, parser, ast, std
TEA_VERSION = "0.0.3-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.tree_to_string()
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = std.default_context()
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags: return
print(output)
if __name__ == "__main__":
main()
Fix bugs and increment version number
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import tokenizer, parser, env
TEA_VERSION = "0.0.4-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.eval(context)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python3
"""Command line runtime for Tea."""
from runtime import tokenizer, parser, ast, std
TEA_VERSION = "0.0.3-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.tree_to_string()
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = std.default_context()
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags: return
print(output)
if __name__ == "__main__":
main()
<commit_msg>Fix bugs and increment version number<commit_after>
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import tokenizer, parser, env
TEA_VERSION = "0.0.4-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.eval(context)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
from runtime import tokenizer, parser, ast, std
TEA_VERSION = "0.0.3-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.tree_to_string()
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = std.default_context()
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags: return
print(output)
if __name__ == "__main__":
main()
Fix bugs and increment version number#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import tokenizer, parser, env
TEA_VERSION = "0.0.4-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.eval(context)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python3
"""Command line runtime for Tea."""
from runtime import tokenizer, parser, ast, std
TEA_VERSION = "0.0.3-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.tree_to_string()
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = std.default_context()
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags: return
print(output)
if __name__ == "__main__":
main()
<commit_msg>Fix bugs and increment version number<commit_after>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import tokenizer, parser, env
TEA_VERSION = "0.0.4-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = tokenizer.apply(expression)
tree = parser.generate(tokens)
return tree.eval(context)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
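
The REPL above threads one shared context through tokenize/parse/eval and steers the loop with string flags ("exit", "continue", "done"). A minimal sketch of that flag-based control pattern follows, with stand-in names; EchoContext and the uppercasing evaluator are not Tea's real runtime API.

# Minimal sketch of the flag-driven REPL loop used above. EchoContext and
# the uppercasing "evaluator" are stand-ins, not Tea's actual runtime API.
class EchoContext:
    def __init__(self):
        self.flags = []

def interpret(expression, context):
    if expression == "exit":
        context.flags.append("exit")
        return None
    return expression.upper()  # stand-in for tokenize -> parse -> eval

context = EchoContext()
for line in ["brew", "steep", "exit"]:
    output = interpret(line, context)
    if "exit" in context.flags:
        break
    print(output)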
|
542ec659bf8545fcb50d0a4df068aa28d073dacc
|
cmsplugin_zinnia/forms.py
|
cmsplugin_zinnia/forms.py
|
"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
|
"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
fields = '__all__'
|
Add fields attribute to CalendarEntriesAdminForm.
|
Add fields attribute to CalendarEntriesAdminForm.
Django 1.8 requires either the 'fields' or 'exclude' attribute to be set for modelForm. Omitting any definition of the fields to use will result in an ImproperlyConfigured exception. See https://docs.djangoproject.com/en/1.8/ref/forms/models/.
|
Python
|
bsd-3-clause
|
bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,bittner/cmsplugin-zinnia
|
"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
Add fields attribute to CalendarEntriesAdminForm.
Django 1.8 requires either the 'fields' or 'exclude' attribute to be set for modelForm. Omitting any definition of the fields to use will result in an ImproperlyConfigured exception. See https://docs.djangoproject.com/en/1.8/ref/forms/models/.
|
"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
fields = '__all__'
|
<commit_before>"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
<commit_msg>Add fields attribute to CalendarEntriesAdminForm.
Django 1.8 requires either the 'fields' or 'exclude' attribute to be set for modelForm. Omitting any definition of the fields to use will result in an ImproperlyConfigured exception. See https://docs.djangoproject.com/en/1.8/ref/forms/models/.<commit_after>
|
"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
fields = '__all__'
|
"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
Add fields attribute to CalendarEntriesAdminForm.
Django 1.8 requires either the 'fields' or 'exclude' attribute to be set for modelForm. Omitting any definition of the fields to use will result in an ImproperlyConfigured exception. See https://docs.djangoproject.com/en/1.8/ref/forms/models/."""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
fields = '__all__'
|
<commit_before>"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
<commit_msg>Add fields attribute to CalendarEntriesAdminForm.
Django 1.8 requires either the 'fields' or 'exclude' attribute to be set for modelForm. Omitting any definition of the fields to use will result in an ImproperlyConfigured exception. See https://docs.djangoproject.com/en/1.8/ref/forms/models/.<commit_after>"""Forms for cmsplugin-zinnia"""
from django import forms
from django.utils.translation import ugettext as _
from cmsplugin_zinnia.models import CalendarEntriesPlugin
class CalendarEntriesAdminForm(forms.ModelForm):
"""
Admin Form for CalendarEntriesPlugin
"""
def clean(self):
data = self.cleaned_data
if int(bool(data.get('year'))) + int(bool(data.get('month'))) == 1:
raise forms.ValidationError(
_('Year and month must be defined together.'))
return data
class Meta:
model = CalendarEntriesPlugin
fields = '__all__'
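
The one-line fix above satisfies a rule that applies to every ModelForm on Django 1.8+: Meta must declare either fields or exclude, otherwise ImproperlyConfigured is raised when the form class is defined. A minimal sketch with a hypothetical model, assuming a configured Django project; ExamplePlugin is not a cmsplugin-zinnia model.

# Minimal sketch of the Django 1.8+ ModelForm rule: Meta needs `fields` or
# `exclude`. ExamplePlugin is a hypothetical model, not from the repo.
from django import forms
from django.db import models

class ExamplePlugin(models.Model):
    year = models.PositiveIntegerField(null=True, blank=True)
    month = models.PositiveIntegerField(null=True, blank=True)

    class Meta:
        app_label = 'example'

class ExamplePluginForm(forms.ModelForm):
    class Meta:
        model = ExamplePlugin
        fields = '__all__'  # or exclude = (); omitting both raises
                            # django.core.exceptions.ImproperlyConfigured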
|
4a8e1c8ef51e38ceb8ae8cabcb61651f59f601c0
|
urls.py
|
urls.py
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessor/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessorFamily/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
Update Root URL config to set the default mapping
|
Update Root URL config to set the default mapping
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessor/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)Update Root URL config to set the default mapping
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessorFamily/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
<commit_before>__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessor/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)<commit_msg>Update Root URL config to set the default mapping<commit_after>
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessorFamily/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessor/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)Update Root URL config to set the default mapping__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessorFamily/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
<commit_before>__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessor/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)<commit_msg>Update Root URL config to set the default mapping<commit_after>__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/plots/avgVGRvsProcessorFamily/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
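
The root pattern above wraps HttpResponseRedirect in a lambda. On the same Django 1.x API the default mapping can also be expressed with RedirectView, which avoids the throwaway callable; a sketch reusing the commit's target path:

# Minimal sketch: the same default redirect via RedirectView instead of a
# lambda. Only the target path is taken from the commit.
from django.conf.urls import url
from django.views.generic import RedirectView

urlpatterns = [
    url(r'^$', RedirectView.as_view(
        url='/plots/avgVGRvsProcessorFamily/', permanent=False)),
]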
|
97418e6815faacbaa46a3a29bef0c4c0454bede1
|
urls.py
|
urls.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
from django.views.static import serve
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
|
Support for string view arguments to url() will be removed
|
[DJ1.10] Support for string view arguments to url() will be removed
|
Python
|
apache-2.0
|
benadida/helios-server,shirlei/helios-server,shirlei/helios-server,benadida/helios-server,benadida/helios-server,shirlei/helios-server,benadida/helios-server,shirlei/helios-server,shirlei/helios-server,benadida/helios-server
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
[DJ1.10] Support for string view arguments to url() will be removed
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
from django.views.static import serve
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
<commit_msg>[DJ1.10] Support for string view arguments to url() will be removed<commit_after>
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
from django.views.static import serve
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
[DJ1.10] Support for string view arguments to url() will be removed# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
from django.views.static import serve
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
<commit_msg>[DJ1.10] Support for string view arguments to url() will be removed<commit_after># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import url, include
from django.views.static import serve
urlpatterns = [
url(r'^auth/', include('helios_auth.urls')),
url(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
url(r'booth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
url(r'verifier/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
url(r'static/auth/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
url(r'static/helios/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/helios/media'}),
url(r'static/(?P<path>.*)$', serve, {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
url(r'^', include('server_ui.urls')),
]
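
The commit above applies the Django 1.10 rule mechanically: every dotted-string view passed to url() becomes an imported callable. A minimal sketch of the general migration pattern, with hypothetical names; myapp.views.home is not part of helios.

# Minimal sketch of the Django 1.10 migration: pass the view callable, not a
# dotted string. `myapp` and its `home` view are hypothetical names.
from django.conf.urls import url
from myapp.views import home

urlpatterns = [
    # Removed in Django 1.10: url(r'^$', 'myapp.views.home')
    url(r'^$', home, name='home'),
]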
|
96a2839da4963303ace6e147bb436d2e24a5efd4
|
main.py
|
main.py
|
from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporary solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
if (len(active.items()) + len(scheduled.items())) > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
|
from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
import datetime
# Temporary solution
db.insert_url("http://www.upol.cz")
db.insert_url("http://www.cmtf.upol.cz")
db.insert_url("http://www.lf.upol.cz")
db.insert_url("http://www.ff.upol.cz")
db.insert_url("http://www.prf.upol.cz")
db.insert_url("http://www.pdf.upol.cz")
db.insert_url("http://ftk.upol.cz")
db.insert_url("http://www.pf.upol.cz")
db.insert_url("http://www.fzv.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
reserved = inspect.reserved()
active_number = len(list(active.values())[0])
scheduled_number = len(list(scheduled.values())[0])
reserved_number = len(list(reserved.values())[0])
if active_number + scheduled_number + reserved_number > 0:
return True
else:
return False
start_time = datetime.datetime.now()
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
end_time = datetime.datetime.now()
elapsed = end_time - start_time
print(str(elapsed))
|
Fix is_worker_running + new seed
|
Fix is_worker_running + new seed
|
Python
|
mit
|
UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine
|
from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporary solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
if (len(active.items()) + len(scheduled.items())) > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
Fix is_worker_running + new seed
|
from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
import datetime
# Temporary solution
db.insert_url("http://www.upol.cz")
db.insert_url("http://www.cmtf.upol.cz")
db.insert_url("http://www.lf.upol.cz")
db.insert_url("http://www.ff.upol.cz")
db.insert_url("http://www.prf.upol.cz")
db.insert_url("http://www.pdf.upol.cz")
db.insert_url("http://ftk.upol.cz")
db.insert_url("http://www.pf.upol.cz")
db.insert_url("http://www.fzv.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
reserved = inspect.reserved()
active_number = len(list(active.values())[0])
scheduled_number = len(list(scheduled.values())[0])
reserved_number = len(list(reserved.values())[0])
if active_number + scheduled_number + reserved_number > 0:
return True
else:
return False
start_time = datetime.datetime.now()
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
end_time = datetime.datetime.now()
elapsed = end_time - start_time
print(str(elapsed))
|
<commit_before>from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporary solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
if (len(active.items()) + len(scheduled.items())) > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
<commit_msg>Fix is_worker_running + new seed<commit_after>
|
from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
import datetime
# Temporary solution
db.insert_url("http://www.upol.cz")
db.insert_url("http://www.cmtf.upol.cz")
db.insert_url("http://www.lf.upol.cz")
db.insert_url("http://www.ff.upol.cz")
db.insert_url("http://www.prf.upol.cz")
db.insert_url("http://www.pdf.upol.cz")
db.insert_url("http://ftk.upol.cz")
db.insert_url("http://www.pf.upol.cz")
db.insert_url("http://www.fzv.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
reserved = inspect.reserved()
active_number = len(list(active.values())[0])
scheduled_number = len(list(scheduled.values())[0])
reserved_number = len(list(reserved.values())[0])
if active_number + scheduled_number + reserved_number > 0:
return True
else:
return False
start_time = datetime.datetime.now()
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
end_time = datetime.datetime.now()
elapsed = end_time - start_time
print(str(elapsed))
|
from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporary solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
if (len(active.items()) + len(scheduled.items())) > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
Fix is_worker_running + new seedfrom crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
import datetime
# Temporary solution
db.insert_url("http://www.upol.cz")
db.insert_url("http://www.cmtf.upol.cz")
db.insert_url("http://www.lf.upol.cz")
db.insert_url("http://www.ff.upol.cz")
db.insert_url("http://www.prf.upol.cz")
db.insert_url("http://www.pdf.upol.cz")
db.insert_url("http://ftk.upol.cz")
db.insert_url("http://www.pf.upol.cz")
db.insert_url("http://www.fzv.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
reserved = inspect.reserved()
active_number = len(list(active.values())[0])
scheduled_number = len(list(scheduled.values())[0])
reserved_number = len(list(reserved.values())[0])
if active_number + scheduled_number + reserved_number > 0:
return True
else:
return False
start_time = datetime.datetime.now()
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
end_time = datetime.datetime.now()
elapsed = end_time - start_time
print(str(elapsed))
|
<commit_before>from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporary solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
if (len(active.items()) + len(scheduled.items())) > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
<commit_msg>Fix is_worker_running + new seed<commit_after>from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
import datetime
# Temporary solution
db.insert_url("http://www.upol.cz")
db.insert_url("http://www.cmtf.upol.cz")
db.insert_url("http://www.lf.upol.cz")
db.insert_url("http://www.ff.upol.cz")
db.insert_url("http://www.prf.upol.cz")
db.insert_url("http://www.pdf.upol.cz")
db.insert_url("http://ftk.upol.cz")
db.insert_url("http://www.pf.upol.cz")
db.insert_url("http://www.fzv.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
reserved = inspect.reserved()
active_number = len(list(active.values())[0])
scheduled_number = len(list(scheduled.values())[0])
reserved_number = len(list(reserved.values())[0])
if active_number + scheduled_number + reserved_number > 0:
return True
else:
return False
start_time = datetime.datetime.now()
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
end_time = datetime.datetime.now()
elapsed = end_time - start_time
print(str(elapsed))
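
The fixed is_worker_running() above counts outstanding work by unpacking the replies of app.control.inspect(): active(), scheduled() and reserved() each map a worker hostname to a list of tasks, and the commit reads the first (only) worker's list. A minimal sketch of that reply shape and the counting step, using made-up inspect output:

# Minimal sketch of the inspect() reply shape the fix relies on:
# {worker_hostname: [task_dict, ...]}. The dictionaries below are made up.
active = {'celery@host1': [{'id': 'a1', 'name': 'crawl_url_task'}]}
scheduled = {'celery@host1': []}
reserved = {'celery@host1': []}

# Mirrors the commit's logic, which assumes a single worker (hence [0]).
pending = sum(len(list(reply.values())[0])
              for reply in (active, scheduled, reserved))
print(pending > 0)  # True while the worker still holds tasks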
|
dc87229eeeef35325d72a1b97e0790204673a5aa
|
main.py
|
main.py
|
from curses import wrapper
from ui import ChatUI
from client import Client
import ConfigParser
def main(stdscr):
cp = ConfigParser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
|
from curses import wrapper
from ui import ChatUI
from client import Client
import configparser
def main(stdscr):
cp = configparser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password').encode('utf-8')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
|
Make it work with Py3
|
Make it work with Py3
|
Python
|
mit
|
vhf/kwak_cli
|
from curses import wrapper
from ui import ChatUI
from client import Client
import ConfigParser
def main(stdscr):
cp = ConfigParser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
Make it work with Py3
|
from curses import wrapper
from ui import ChatUI
from client import Client
import configparser
def main(stdscr):
cp = configparser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password').encode('utf-8')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
|
<commit_before>from curses import wrapper
from ui import ChatUI
from client import Client
import ConfigParser
def main(stdscr):
cp = ConfigParser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
<commit_msg>Make it work with Py3<commit_after>
|
from curses import wrapper
from ui import ChatUI
from client import Client
import configparser
def main(stdscr):
cp = configparser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password').encode('utf-8')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
|
from curses import wrapper
from ui import ChatUI
from client import Client
import ConfigParser
def main(stdscr):
cp = ConfigParser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
Make it work with Py3from curses import wrapper
from ui import ChatUI
from client import Client
import configparser
def main(stdscr):
cp = configparser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password').encode('utf-8')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
|
<commit_before>from curses import wrapper
from ui import ChatUI
from client import Client
import ConfigParser
def main(stdscr):
cp = ConfigParser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
<commit_msg>Make it work with Py3<commit_after>from curses import wrapper
from ui import ChatUI
from client import Client
import configparser
def main(stdscr):
cp = configparser.ConfigParser()
cp.read('config.cfg')
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password').encode('utf-8')
stdscr.clear()
ui = ChatUI(stdscr)
client = Client(username, password, ui)
client.subscribe_to_channel('main')
client.subscribe_to_users()
message = ''
while message != '/quit':
message = ui.wait_input()
if message[0:6] == '/join ':
client.subscribe_to_channel(message[6:])
else:
client.client.insert('messages', {'channel': client.current_channel, 'text': message})
wrapper(main)
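
Beyond the two edits above (the module renamed to configparser, the password encoded to bytes), a codebase that must run on both interpreters can alias the renamed module instead; a minimal sketch of that fallback pattern, where config.cfg and its [credentials] section are assumed to exist as in the commit:

# Minimal sketch: Py2/Py3-compatible import of the renamed config module.
try:
    import configparser                      # Python 3 name
except ImportError:
    import ConfigParser as configparser      # Python 2 fallback

cp = configparser.ConfigParser()
cp.read('config.cfg')  # assumed to exist with a [credentials] section
username = cp.get('credentials', 'username')
password = cp.get('credentials', 'password').encode('utf-8')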
|
3e89c102e9a47de1288b268b04a11ff73a22cd2e
|
main.py
|
main.py
|
# coding: utf-8
from web import app
import db, config
import os.path
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
db.init_db()
app.run(debug = True)
|
# coding: utf-8
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
import db
from web import app
db.init_db()
app.run(debug = True)
|
Fix handling of missing config file
|
Fix handling of missing config file
|
Python
|
agpl-3.0
|
hhm0/supysonic,nwokeo/supysonic,ezpuzz/supysonic,spl0k/supysonic,hhm0/supysonic,spl0k/supysonic,nwokeo/supysonic,nwokeo/supysonic,hhm0/supysonic,nwokeo/supysonic,spl0k/supysonic,nwokeo/supysonic,ezpuzz/supysonic
|
# coding: utf-8
from web import app
import db, config
import os.path
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
db.init_db()
app.run(debug = True)
Fix handling of missing config file
|
# coding: utf-8
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
import db
from web import app
db.init_db()
app.run(debug = True)
|
<commit_before># coding: utf-8
from web import app
import db, config
import os.path
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
db.init_db()
app.run(debug = True)
<commit_msg>Fix handling of missing config file<commit_after>
|
# coding: utf-8
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
import db
from web import app
db.init_db()
app.run(debug = True)
|
# coding: utf-8
from web import app
import db, config
import os.path
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
db.init_db()
app.run(debug = True)
Fix handling of missing config file
# coding: utf-8
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
import db
from web import app
db.init_db()
app.run(debug = True)
|
<commit_before># coding: utf-8
from web import app
import db, config
import os.path
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
db.init_db()
app.run(debug = True)
<commit_msg>Fix handling of missing config file<commit_after># coding: utf-8
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
print >>sys.stderr, "Couldn't find configuration file"
sys.exit(1)
if not os.path.exists(config.get('CACHE_DIR')):
os.makedirs(config.get('CACHE_DIR'))
import db
from web import app
db.init_db()
app.run(debug = True)
|
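Two details make the fix above work: sys is now imported (the original called sys.stderr and sys.exit without importing sys, so the error path itself would have raised a NameError), and the config-dependent modules db and web are imported only after config.check() succeeds, so a missing config file yields the intended message rather than an import-time crash. A minimal sketch of the deferred-import pattern, with config_ok() as a hypothetical stand-in for config.check():
import sys

def config_ok():
    # Hypothetical stand-in for config.check().
    return False

if __name__ == '__main__':
    if not config_ok():
        sys.stderr.write("Couldn't find configuration file\n")
        sys.exit(1)
    # Deferred on purpose: these imports run module-level setup that
    # needs the configuration, so they must come after the check.
    import sqlite3  # placeholder for the app's db / web modules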
a290a53a4323ad20ca0692f603cd2fc7a6c85c18
|
test_patois.py
|
test_patois.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for patois."""
from __future__ import (print_function, absolute_import,
unicode_literals, division)
# Copyright (c) 2014 Hank Gay
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
import patois
class TestPatoisFunctions(unittest.TestCase):
def test_module_name_from_file_name(self):
# Standard CPython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a.py'))
# Jython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a$py.class'))
if __name__ == '__main__':
unittest.main()
|
Add test for first compatibility function.
|
TDD: Add test for first compatibility function.
|
Python
|
mit
|
gthank/patois,gthank/patois
|
TDD: Add test for first compatibility function.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for patois."""
from __future__ import (print_function, absolute_import,
unicode_literals, division)
# Copyright (c) 2014 Hank Gay
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
import patois
class TestPatoisFunctions(unittest.TestCase):
def test_module_name_from_file_name(self):
# Standard CPython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a.py'))
# Jython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a$py.class'))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>TDD: Add test for first compatibility function.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for patois."""
from __future__ import (print_function, absolute_import,
unicode_literals, division)
# Copyright (c) 2014 Hank Gay
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
import patois
class TestPatoisFunctions(unittest.TestCase):
def test_module_name_from_file_name(self):
# Standard CPython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a.py'))
# Jython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a$py.class'))
if __name__ == '__main__':
unittest.main()
|
TDD: Add test for first compatibility function.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for patois."""
from __future__ import (print_function, absolute_import,
unicode_literals, division)
# Copyright (c) 2014 Hank Gay
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
import patois
class TestPatoisFunctions(unittest.TestCase):
def test_module_name_from_file_name(self):
# Standard CPython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a.py'))
# Jython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a$py.class'))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>TDD: Add test for first compatibility function.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for patois."""
from __future__ import (print_function, absolute_import,
unicode_literals, division)
# Copyright (c) 2014 Hank Gay
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import unittest
import patois
class TestPatoisFunctions(unittest.TestCase):
def test_module_name_from_file_name(self):
# Standard CPython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a.py'))
# Jython bytecode filename
self.assertEqual('a', patois.module_name_from_file_name('a$py.class'))
if __name__ == '__main__':
unittest.main()
|
|
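The record above adds the test first, in TDD style; patois.module_name_from_file_name does not exist yet. One implementation that would make both assertions pass — a sketch, not necessarily the project's eventual code — strips either the CPython '.py' suffix or Jython's '$py.class' suffix:
def module_name_from_file_name(file_name):
    # Jython compiles a.py to a$py.class; check the longer suffix first.
    for suffix in ('$py.class', '.py'):
        if file_name.endswith(suffix):
            return file_name[:-len(suffix)]
    return file_name

assert module_name_from_file_name('a.py') == 'a'
assert module_name_from_file_name('a$py.class') == 'a'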
1fa3c49f692e311e67db4f128928ac93e51830ff
|
babybuddy/tests/tests_commands.py
|
babybuddy/tests/tests_commands.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
"""def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)"""
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TransactionTestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TransactionTestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)
|
Fix and re-enable the reset management command test.
|
Fix and re-enable the reset management command test.
Not 100% sure of why this fixes the issue - it appears that changes to django.test.TestCase in Django 2.0 led to the test failing.
|
Python
|
bsd-2-clause
|
cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
"""def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)"""
Fix and re-enable the reset management command test.
Not 100% sure of why this fixes the issue - it appears that changes to django.test.TestCase in Django 2.0 led to the test failing.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TransactionTestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TransactionTestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
"""def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)"""
<commit_msg>Fix and re-enable the reset management command test.
Not 100% sure of why this fixes the issue - it appears that changes to django.test.TestCase in Django 2.0 led to the test failing.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TransactionTestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TransactionTestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
"""def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)"""
Fix and re-enable the reset management command test.
Not 100% sure of why this fixes the issue - it appears that changes to django.test.TestCase in Django 2.0 led to the test failing.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TransactionTestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TransactionTestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
"""def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)"""
<commit_msg>Fix and re-enable the reset management command test.
Not 100% sure of why this fixes the issue - it appears that changes to django.test.TestCase in Django 2.0 led to the test failing.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TransactionTestCase
from django.contrib.auth.models import User
from django.core.management import call_command
from core.models import Child
class CommandsTestCase(TransactionTestCase):
def test_migrate(self):
call_command('migrate', verbosity=0)
self.assertIsInstance(User.objects.get(username='admin'), User)
def test_fake(self):
call_command('migrate', verbosity=0)
call_command('fake', children=1, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 1)
call_command('fake', children=2, days=7, verbosity=0)
self.assertEqual(Child.objects.count(), 3)
def test_reset(self):
call_command('reset', verbosity=0, interactive=False)
self.assertIsInstance(User.objects.get(username='admin'), User)
self.assertEqual(Child.objects.count(), 1)
|
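A likely explanation for the fix above, offered as an assumption rather than a confirmed diagnosis: django.test.TestCase wraps every test in a transaction that is rolled back afterwards, and a management command that manages transactions or truncates tables itself (as a 'reset' command plausibly does) conflicts with that wrapper. TransactionTestCase commits for real and flushes the database between tests instead. The only code change needed is the base class; a sketch, assuming it runs inside a configured Django project:
from django.test import TransactionTestCase

class ResetCommandTests(TransactionTestCase):
    # No per-test transaction is wrapped around the body, so commands
    # that commit, roll back, or flush tables behave as in production;
    # the trade-off is slower tests (a full flush between them).
    ...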
3157151d835377a4ddf80d5514ea1edc0a2a8203
|
account/decorators.py
|
account/decorators.py
|
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
|
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=functools.WRAPPER_ASSIGNMENTS)
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
|
Fix a 3.0 compat issue
|
Fix a 3.0 compat issue
|
Python
|
mit
|
pinax/django-user-accounts,pinax/django-user-accounts
|
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
Fix a 3.0 compat issue
|
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=functools.WRAPPER_ASSIGNMENTS)
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
|
<commit_before>import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
<commit_msg>Fix a 3.0 compat issue<commit_after>
|
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=functools.WRAPPER_ASSIGNMENTS)
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
|
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
Fix a 3.0 compat issue
import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=functools.WRAPPER_ASSIGNMENTS)
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
|
<commit_before>import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
<commit_msg>Fix a 3.0 compat issue<commit_after>import functools
from django.contrib.auth import REDIRECT_FIELD_NAME
from account.utils import handle_redirect_to_login
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=functools.WRAPPER_ASSIGNMENTS)
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated:
return view_func(request, *args, **kwargs)
return handle_redirect_to_login(
request,
redirect_field_name=redirect_field_name,
login_url=login_url
)
return _wrapped_view
if func:
return decorator(func)
return decorator
|
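Background on the record above: django.utils.decorators.available_attrs existed only to work around Python 2 limitations and was removed in Django 3.0; on Python 3 it simply returned functools.WRAPPER_ASSIGNMENTS, so passing the constant directly is a drop-in replacement. Since WRAPPER_ASSIGNMENTS is also the default for functools.wraps, a bare @functools.wraps(view_func) would be equivalent. A self-contained sketch:
import functools

def noop_decorator(func):
    # assigned=functools.WRAPPER_ASSIGNMENTS is the default, so this is
    # the same as a plain @functools.wraps(func) on Python 3.
    @functools.wraps(func, assigned=functools.WRAPPER_ASSIGNMENTS)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@noop_decorator
def greet(name):
    """Say hello."""
    return 'hello ' + name

assert greet.__name__ == 'greet' and greet.__doc__ == 'Say hello.'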
b1ffe99226cae1da873d249d8299b4b6b721dde3
|
go_contacts/server.py
|
go_contacts/server.py
|
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, **settings):
self.backend = RiakContactsBackend(settings.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
|
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone.handlers import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, config_file=None, **settings):
config_dict = self.get_config_settings(config_file)
self.backend = RiakContactsBackend(config_dict.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
|
Read config file passed as --appopts param.
|
Read config file passed as --appopts param.
|
Python
|
bsd-3-clause
|
praekelt/go-contacts-api,praekelt/go-contacts-api
|
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, **settings):
self.backend = RiakContactsBackend(settings.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
Read config file passed as --appopts param.
|
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone.handlers import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, config_file=None, **settings):
config_dict = self.get_config_settings(config_file)
self.backend = RiakContactsBackend(config_dict.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
|
<commit_before>"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, **settings):
self.backend = RiakContactsBackend(settings.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
<commit_msg>Read config file passed as --appopts param.<commit_after>
|
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone.handlers import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, config_file=None, **settings):
config_dict = self.get_config_settings(config_file)
self.backend = RiakContactsBackend(config_dict.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
|
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, **settings):
self.backend = RiakContactsBackend(settings.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
Read config file passed as --appopts param.
"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone.handlers import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, config_file=None, **settings):
config_dict = self.get_config_settings(config_file)
self.backend = RiakContactsBackend(config_dict.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
|
<commit_before>"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, **settings):
self.backend = RiakContactsBackend(settings.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
<commit_msg>Read config file passed as --appopts param.<commit_after>"""
Cyclone application for Vumi Go contacts API.
"""
from go_api.cyclone.handlers import ApiApplication
from go_contacts.backends.riak import RiakContactsBackend
class ContactsApi(ApiApplication):
"""
:param IContactsBackend backend:
A backend that provides a contact collection factory.
"""
def __init__(self, config_file=None, **settings):
config_dict = self.get_config_settings(config_file)
self.backend = RiakContactsBackend(config_dict.pop('riak_config'))
ApiApplication.__init__(self, **settings)
@property
def collections(self):
return (
('/', self.backend.get_contact_collection),
)
|
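In the record above, get_config_settings comes from the go_api ApiApplication base class; its exact behaviour is not shown here. The general shape — turn an optional config file into a dict, then pop backend-specific keys out of it — can be sketched with a hypothetical load_settings helper; the JSON format and key names below are assumptions for illustration only:
import json

def load_settings(config_file=None, **overrides):
    # Hypothetical helper: values from the file come first, then keyword
    # overrides win; with no file, only the overrides are returned.
    settings = {}
    if config_file is not None:
        with open(config_file) as f:
            settings.update(json.load(f))
    settings.update(overrides)
    return settings

settings = load_settings(riak_config={'host': 'localhost'})
riak_config = settings.pop('riak_config')   # the backend gets its section
assert riak_config == {'host': 'localhost'} and settings == {}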
63a79aaea5aa7124d753a2d7b70645bd2e1f4419
|
globus_cli/parser/parse_cmd.py
|
globus_cli/parser/parse_cmd.py
|
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
args.func(args)
|
from __future__ import print_function
import sys
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
try:
args.func(args)
except NotImplementedError as e:
print('NotImplementedError: {}'.format(e.message), file=sys.stderr)
sys.exit(1)
|
Add special handling for NotImplementedError
|
Add special handling for NotImplementedError
If the function called by run_command() raises a NotImplementedError,
don't print the full stacktrace. Do a mild amount of custom formatting,
then exit with status 1 (failure).
|
Python
|
apache-2.0
|
globus/globus-cli,globus/globus-cli
|
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
args.func(args)
Add special handling for NotImplementedError
If the function called by run_command() raises a NotImplementedError,
don't print the full stacktrace. Do a mild amount of custom formatting,
then exit with status 1 (failure).
|
from __future__ import print_function
import sys
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
try:
args.func(args)
except NotImplementedError as e:
print('NotImplementedError: {}'.format(e.message), file=sys.stderr)
sys.exit(1)
|
<commit_before>from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
args.func(args)
<commit_msg>Add special handling for NotImplementedError
If the function called by run_command() raises a NotImplementedError,
don't print the full stacktrace. Do a mild amount of custom formatting,
then exit with status 1 (failure).<commit_after>
|
from __future__ import print_function
import sys
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
try:
args.func(args)
except NotImplementedError as e:
print('NotImplementedError: {}'.format(e.message), file=sys.stderr)
sys.exit(1)
|
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
args.func(args)
Add special handling for NotImplementedError
If the function called by run_command() raises a NotImplementedError,
don't print the full stacktrace. Do a mild amount of custom formatting,
then exit with status 1 (failure).
from __future__ import print_function
import sys
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
try:
args.func(args)
except NotImplementedError as e:
print('NotImplementedError: {}'.format(e.message), file=sys.stderr)
sys.exit(1)
|
<commit_before>from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
args.func(args)
<commit_msg>Add special handling for NotImplementedError
If the function called by run_command() raises a NotImplementedError,
don't print the full stacktrace. Do a mild amount of custom formatting,
then exit with status 1 (failure).<commit_after>from __future__ import print_function
import sys
from globus_cli.parser.shared_parser import GlobusCLISharedParser
from globus_cli.parser.command_tree import build_command_tree
def _gen_parser():
"""
Produces a top-level argument parser built out of all of the various
subparsers for different services.
"""
# create the top parser and give it subparsers
top_level_parser = GlobusCLISharedParser()
subparsers = top_level_parser.add_subparsers(
title='Commands',
parser_class=GlobusCLISharedParser, metavar='')
build_command_tree(subparsers)
# return the created parser in all of its glory
return top_level_parser
def _load_args():
"""
Load commandline arguments, and do any necessary post-processing.
"""
parser = _gen_parser()
args = parser.parse_args()
return args
def run_command():
"""
Whatever arguments were loaded, they set a function to be invoked on the
arguments themselves -- somewhat circular, but a nifty way of passing the
args to a function that this module doesn't even know about
"""
args = _load_args()
try:
args.func(args)
except NotImplementedError as e:
print('NotImplementedError: {}'.format(e.message), file=sys.stderr)
sys.exit(1)
|
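One caveat worth flagging on the record above: e.message is a Python 2 idiom (the attribute does not exist on Python 3 exceptions), so even though the file now imports print_function for 2/3 compatibility, the handler itself only works on Python 2. A portable sketch of the same pattern, formatting the exception directly:
from __future__ import print_function
import sys

def run(func, args=None):
    # Same idea as run_command() above, but formatting the exception
    # (equivalent to str(e)) works on both Python 2 and Python 3.
    try:
        func(args)
    except NotImplementedError as e:
        print('NotImplementedError: {0}'.format(e), file=sys.stderr)
        sys.exit(1)

def todo(_args):
    raise NotImplementedError('this subcommand is not written yet')

# run(todo)  # prints the message to stderr and exits with status 1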
c104880b195ade7e202894de424bd8f5c1764251
|
scripts/2a-set-aircraft-poses.py
|
scripts/2a-set-aircraft-poses.py
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera)
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file (lat,lon,alt,yaw,pitch,roll)')
parser.add_argument('--pix4d', help='use the specified pix4d csv file (lat,lon,alt,roll,pitch,yaw)')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera, order='ypr')
pose_set = True
elif args.pix4d != None:
Pose.setAircraftPoses(proj, args.pix4d, order='rpy')
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
|
Add support for pix4d meta file format.
|
Add support for pix4d meta file format.
Former-commit-id: 7330412d726bf8cc4bd9d8659fb067e4921af798
|
Python
|
mit
|
UASLab/ImageAnalysis
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera)
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
Add support for pix4d meta file format.
Former-commit-id: 7330412d726bf8cc4bd9d8659fb067e4921af798
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file (lat,lon,alt,yaw,pitch,roll)')
parser.add_argument('--pix4d', help='use the specified pix4d csv file (lat,lon,alt,roll,pitch,yaw)')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera, order='ypr')
pose_set = True
elif args.pix4d != None:
Pose.setAircraftPoses(proj, args.pix4d, order='rpy')
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
|
<commit_before>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera)
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
<commit_msg>Add support for pix4d meta file format.
Former-commit-id: 7330412d726bf8cc4bd9d8659fb067e4921af798<commit_after>
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file (lat,lon,alt,yaw,pitch,roll)')
parser.add_argument('--pix4d', help='use the specified pix4d csv file (lat,lon,alt,roll,pitch,yaw)')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera, order='ypr')
pose_set = True
elif args.pix4d != None:
Pose.setAircraftPoses(proj, args.pix4d, order='rpy')
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera)
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
Add support for pix4d meta file format.
Former-commit-id: 7330412d726bf8cc4bd9d8659fb067e4921af798
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file (lat,lon,alt,yaw,pitch,roll)')
parser.add_argument('--pix4d', help='use the specified pix4d csv file (lat,lon,alt,roll,pitch,yaw)')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera, order='ypr')
pose_set = True
elif args.pix4d != None:
Pose.setAircraftPoses(proj, args.pix4d, order='rpy')
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
|
<commit_before>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera)
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
<commit_msg>Add support for pix4d meta file format.
Former-commit-id: 7330412d726bf8cc4bd9d8659fb067e4921af798<commit_after>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import Pose
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Set the aircraft poses from flight data.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--sentera', help='use the specified sentera image-metadata.txt file (lat,lon,alt,yaw,pitch,roll)')
parser.add_argument('--pix4d', help='use the specified pix4d csv file (lat,lon,alt,roll,pitch,yaw)')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
pose_set = False
if args.sentera != None:
Pose.setAircraftPoses(proj, args.sentera, order='ypr')
pose_set = True
elif args.pix4d != None:
Pose.setAircraftPoses(proj, args.pix4d, order='rpy')
pose_set = True
if not pose_set:
print "Error: no flight data specified or problem with flight data"
print "No poses computed"
exit
# compute the project's NED reference location (based on average of
# aircraft poses)
proj.compute_ned_reference_lla()
print "NED reference location:", proj.ned_reference_lla
proj.save()
|
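A note on the record above: the new order argument encodes which attitude convention each metadata format uses; Sentera rows carry yaw/pitch/roll ('ypr') while Pix4D CSVs carry roll/pitch/yaw ('rpy'). Pose.setAircraftPoses itself is not shown, so the sketch below only illustrates how such an order string could normalize a row; the helper name and sample values are invented for illustration.
# Hypothetical sketch: normalize attitude columns using an order string.
def parse_attitude(values, order='ypr'):
    # zip pairs each letter of the order string with its column value,
    # e.g. order='rpy' maps values[0] to roll, values[1] to pitch, values[2] to yaw
    by_axis = dict(zip(order, values))
    return by_axis['y'], by_axis['p'], by_axis['r']
print(parse_attitude([12.0, 1.5, -0.3], order='ypr'))  # (12.0, 1.5, -0.3)
print(parse_attitude([-0.3, 1.5, 12.0], order='rpy'))  # (12.0, 1.5, -0.3)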
2799b5f2cf9222313ccfe70d0663070a9950788a
|
sublimelinter/modules/php.py
|
sublimelinter/modules/php.py
|
# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*syntax error,\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
|
# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
|
Change PHP error matching regex
|
Change PHP error matching regex
On my system (OS X 10.7 w/stock PHP 5.3.8), the PHP lint frequently
misses errors due to over-specificity in the regex. This one is
catching them.
|
Python
|
mit
|
uschmidt83/SublimeLinter-for-ST2,SublimeLinter/SublimeLinter-for-ST2,benesch/sublime-linter,benesch/sublime-linter,uschmidt83/SublimeLinter-for-ST2,tangledhelix/SublimeLinter-for-ST2,biodamasceno/SublimeLinter-for-ST2,biodamasceno/SublimeLinter-for-ST2,tangledhelix/SublimeLinter-for-ST2,SublimeLinter/SublimeLinter-for-ST2
|
# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*syntax error,\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
Change PHP error matching regex
On my system (OS X 10.7 w/stock PHP 5.3.8), the PHP lint frequently
misses errors due to over-specificity in the regex. This one is
catching them.
|
# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
|
<commit_before># -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*syntax error,\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
<commit_msg>Change PHP error matching regex
On my system (OS X 10.7 w/stock PHP 5.3.8), the PHP lint frequently
misses errors due to over-specificity in the regex. This one is
catching them.<commit_after>
|
# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
|
# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*syntax error,\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
Change PHP error matching regex
On my system (OS X 10.7 w/stock PHP 5.3.8), the PHP lint frequently
misses errors due to over-specificity in the regex. This one is
catching them.# -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
|
<commit_before># -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*syntax error,\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
<commit_msg>Change PHP error matching regex
On my system (OS X 10.7 w/stock PHP 5.3.8), the PHP lint frequently
misses errors due to over-specificity in the regex. This one is
catching them.<commit_after># -*- coding: utf-8 -*-
# php.py - sublimelint package for checking php files
import re
from base_linter import BaseLinter
CONFIG = {
'language': 'php',
'executable': 'php',
'lint_args': ('-l', '-d display_errors=On')
}
class Linter(BaseLinter):
def parse_errors(self, view, errors, lines, errorUnderlines, violationUnderlines, warningUnderlines, errorMessages, violationMessages, warningMessages):
for line in errors.splitlines():
match = re.match(r'^Parse error:\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)', line)
if match:
error, line = match.group('error'), match.group('line')
self.add_message(int(line), lines, error, errorMessages)
|
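To see what the loosened regex in the record above gains, here is a self-contained comparison. The first sample line follows PHP's usual lint format; the second is a plausible variant without the 'syntax error,' prefix that the old pattern hard-codes (both sample lines are made up for illustration):
import re
OLD = r'^Parse error:\s*syntax error,\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)'
NEW = r'^Parse error:\s*(?P<error>.+?)\s+in\s+.+?\s*line\s+(?P<line>\d+)'
samples = [
    'Parse error: syntax error, unexpected T_STRING in /tmp/a.php on line 3',
    'Parse error: parse error in /tmp/b.php on line 7',
]
for line in samples:
    # the old pattern misses the second sample; the new one matches both
    print(bool(re.match(OLD, line)), bool(re.match(NEW, line)), line)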
2473c714bb89cbd1d741b5e20b32397ffc6b6bd7
|
samp_client/constants.py
|
samp_client/constants.py
|
from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
|
from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
RCON_CMDLIST = 'cmdlist'
RCON_VARLIST = 'varlist'
RCON_EXIT = 'exit'
RCON_ECHO = 'echo'
RCON_HOSTNAME = 'hostname'
RCON_GAMEMODETEXT = 'gamemodetext'
RCON_MAPNAME = 'mapname'
RCON_EXEC = 'exec'
RCON_KICK = 'kick'
RCON_BAN = 'ban'
RCON_CHANGEMODE = 'changemode'
RCON_GMX = 'gmx'
RCON_RELOADBANS = 'reloadbans'
RCON_RELOADLOG = 'reloadlog'
RCON_SAY = 'say'
RCON_PLAYERS = 'players'
RCON_BANIP = 'banip'
RCON_UNBANIP = 'unbanip'
RCON_GRAVITY = 'gravity'
RCON_WEATHER = 'weather'
RCON_LOADFS = 'loadfs'
RCON_WEBURL = 'weburl'
RCON_UNLOADFS = 'unloadfs'
RCON_RELOADFS = 'reloadfs'
RCON_RCON_PASSWORD = 'rcon_password'
RCON_PASSWORD = 'password'
RCON_MESSAGESLIMIT = 'messageslimit'
RCON_ACKSLIMIT = 'ackslimit'
RCON_MESSAGEHOLELIMIT = 'messageholelimit'
RCON_PLAYERTIMEOUT = 'playertimeout'
RCON_LANGUAGE = 'language'
|
Define constants for rcon commands
|
Define constants for rcon commands
|
Python
|
mit
|
mick88/samp-client
|
from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
Define constants for rcon commands
|
from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
RCON_CMDLIST = 'cmdlist'
RCON_VARLIST = 'varlist'
RCON_EXIT = 'exit'
RCON_ECHO = 'echo'
RCON_HOSTNAME = 'hostname'
RCON_GAMEMODETEXT = 'gamemodetext'
RCON_MAPNAME = 'mapname'
RCON_EXEC = 'exec'
RCON_KICK = 'kick'
RCON_BAN = 'ban'
RCON_CHANGEMODE = 'changemode'
RCON_GMX = 'gmx'
RCON_RELOADBANS = 'reloadbans'
RCON_RELOADLOG = 'reloadlog'
RCON_SAY = 'say'
RCON_PLAYERS = 'players'
RCON_BANIP = 'banip'
RCON_UNBANIP = 'unbanip'
RCON_GRAVITY = 'gravity'
RCON_WEATHER = 'weather'
RCON_LOADFS = 'loadfs'
RCON_WEBURL = 'weburl'
RCON_UNLOADFS = 'unloadfs'
RCON_RELOADFS = 'reloadfs'
RCON_RCON_PASSWORD = 'rcon_password'
RCON_PASSWORD = 'password'
RCON_MESSAGESLIMIT = 'messageslimit'
RCON_ACKSLIMIT = 'ackslimit'
RCON_MESSAGEHOLELIMIT = 'messageholelimit'
RCON_PLAYERTIMEOUT = 'playertimeout'
RCON_LANGUAGE = 'language'
|
<commit_before>from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
<commit_msg>Define constants for rcon commands<commit_after>
|
from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
RCON_CMDLIST = 'cmdlist'
RCON_VARLIST = 'varlist'
RCON_EXIT = 'exit'
RCON_ECHO = 'echo'
RCON_HOSTNAME = 'hostname'
RCON_GAMEMODETEXT = 'gamemodetext'
RCON_MAPNAME = 'mapname'
RCON_EXEC = 'exec'
RCON_KICK = 'kick'
RCON_BAN = 'ban'
RCON_CHANGEMODE = 'changemode'
RCON_GMX = 'gmx'
RCON_RELOADBANS = 'reloadbans'
RCON_RELOADLOG = 'reloadlog'
RCON_SAY = 'say'
RCON_PLAYERS = 'players'
RCON_BANIP = 'banip'
RCON_UNBANIP = 'unbanip'
RCON_GRAVITY = 'gravity'
RCON_WEATHER = 'weather'
RCON_LOADFS = 'loadfs'
RCON_WEBURL = 'weburl'
RCON_UNLOADFS = 'unloadfs'
RCON_RELOADFS = 'reloadfs'
RCON_RCON_PASSWORD = 'rcon_password'
RCON_PASSWORD = 'password'
RCON_MESSAGESLIMIT = 'messageslimit'
RCON_ACKSLIMIT = 'ackslimit'
RCON_MESSAGEHOLELIMIT = 'messageholelimit'
RCON_PLAYERTIMEOUT = 'playertimeout'
RCON_LANGUAGE = 'language'
|
from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
Define constants for rcon commandsfrom __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
RCON_CMDLIST = 'cmdlist'
RCON_VARLIST = 'varlist'
RCON_EXIT = 'exit'
RCON_ECHO = 'echo'
RCON_HOSTNAME = 'hostname'
RCON_GAMEMODETEXT = 'gamemodetext'
RCON_MAPNAME = 'mapname'
RCON_EXEC = 'exec'
RCON_KICK = 'kick'
RCON_BAN = 'ban'
RCON_CHANGEMODE = 'changemode'
RCON_GMX = 'gmx'
RCON_RELOADBANS = 'reloadbans'
RCON_RELOADLOG = 'reloadlog'
RCON_SAY = 'say'
RCON_PLAYERS = 'players'
RCON_BANIP = 'banip'
RCON_UNBANIP = 'unbanip'
RCON_GRAVITY = 'gravity'
RCON_WEATHER = 'weather'
RCON_LOADFS = 'loadfs'
RCON_WEBURL = 'weburl'
RCON_UNLOADFS = 'unloadfs'
RCON_RELOADFS = 'reloadfs'
RCON_RCON_PASSWORD = 'rcon_password'
RCON_PASSWORD = 'password'
RCON_MESSAGESLIMIT = 'messageslimit'
RCON_ACKSLIMIT = 'ackslimit'
RCON_MESSAGEHOLELIMIT = 'messageholelimit'
RCON_PLAYERTIMEOUT = 'playertimeout'
RCON_LANGUAGE = 'language'
|
<commit_before>from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
<commit_msg>Define constants for rcon commands<commit_after>from __future__ import unicode_literals, absolute_import
# Opcode definitions
OPCODE_INFO = 'i'
OPCODE_RULES = 'r'
OPCODE_CLIENTS = 'c'
OPCODE_CLIENTS_DETAILED = 'd'
OPCODE_RCON = 'x'
OPCODE_PSEUDORANDOM = 'p'
RCON_CMDLIST = 'cmdlist'
RCON_VARLIST = 'varlist'
RCON_EXIT = 'exit'
RCON_ECHO = 'echo'
RCON_HOSTNAME = 'hostname'
RCON_GAMEMODETEXT = 'gamemodetext'
RCON_MAPNAME = 'mapname'
RCON_EXEC = 'exec'
RCON_KICK = 'kick'
RCON_BAN = 'ban'
RCON_CHANGEMODE = 'changemode'
RCON_GMX = 'gmx'
RCON_RELOADBANS = 'reloadbans'
RCON_RELOADLOG = 'reloadlog'
RCON_SAY = 'say'
RCON_PLAYERS = 'players'
RCON_BANIP = 'banip'
RCON_UNBANIP = 'unbanip'
RCON_GRAVITY = 'gravity'
RCON_WEATHER = 'weather'
RCON_LOADFS = 'loadfs'
RCON_WEBURL = 'weburl'
RCON_UNLOADFS = 'unloadfs'
RCON_RELOADFS = 'reloadfs'
RCON_RCON_PASSWORD = 'rcon_password'
RCON_PASSWORD = 'password'
RCON_MESSAGESLIMIT = 'messageslimit'
RCON_ACKSLIMIT = 'ackslimit'
RCON_MESSAGEHOLELIMIT = 'messageholelimit'
RCON_PLAYERTIMEOUT = 'playertimeout'
RCON_LANGUAGE = 'language'
|
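The constants added above mirror the stock SA-MP server's RCON command names, so callers can avoid typo-prone string literals. A hedged usage sketch (the client object and its send_rcon_command method are assumed from the samp-client package's API and may differ in detail):
from samp_client.constants import RCON_SAY, RCON_PLAYERS
def greet_and_list(client):
    # 'client' is assumed to be a connected samp-client instance
    client.send_rcon_command(RCON_SAY + ' Hello from the server!')  # assumed API
    return client.send_rcon_command(RCON_PLAYERS)                   # assumed API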
d99e3d13ab6d10df1f90fc4383af736ef6e3234f
|
webnut/views.py
|
webnut/views.py
|
from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from webnut import WebNUT
import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
|
from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from .webnut import WebNUT
from . import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
|
Fix imports for Python 3
|
Fix imports for Python 3
|
Python
|
bsd-3-clause
|
rshipp/webNUT
|
from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from webnut import WebNUT
import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
Fix imports for Python 3
|
from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from .webnut import WebNUT
from . import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
|
<commit_before>from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from webnut import WebNUT
import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
<commit_msg>Fix imports for Python 3<commit_after>
|
from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from .webnut import WebNUT
from . import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
|
from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from webnut import WebNUT
import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
Fix imports for Python 3from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from .webnut import WebNUT
from . import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
|
<commit_before>from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from webnut import WebNUT
import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
<commit_msg>Fix imports for Python 3<commit_after>from pyramid.exceptions import NotFound
from pyramid.renderers import get_renderer
from pyramid.view import view_config
from .webnut import WebNUT
from . import config
class NUTViews(object):
def __init__(self, request):
self.request = request
renderer = get_renderer("templates/layout.pt")
self.layout = renderer.implementation().macros['layout']
self.webnut = WebNUT(config.server, config.port,
config.username, config.password)
@view_config(route_name='home', renderer='templates/index.pt')
def home(self):
return dict(title='UPS Devices',
ups_list=self.webnut.get_ups_list())
@view_config(route_name='ups_view', renderer='templates/ups_view.pt')
def ups_view(self):
ups = self.request.matchdict['ups']
try:
ups_name = self.webnut.get_ups_name(ups)
ups_vars = self.webnut.get_ups_vars(ups)
return dict(title=ups_name, ups_vars=ups_vars[0],
ups_status=ups_vars[1])
except KeyError:
raise NotFound
def notfound(request):
request.response.status = 404
return dict(title='No Such UPS')
|
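The webNUT fix above is the standard Python 3 migration step: PEP 328 removed implicit relative imports, so a module inside a package must name its siblings with an explicit leading dot. A minimal reproduction, with the package layout invented for illustration:
# Layout:
#   pkg/__init__.py      (empty)
#   pkg/config.py        defines: server = 'localhost'
#   pkg/views.py         the module below
#
# pkg/views.py
# import config             # Python 2 only: implicit relative import, ImportError on 3
from . import config        # Python 2.6+ and 3: explicit relative import
from .config import server  # equivalent form when only one name is needed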
da510fa88cd8444ac1838489659800c0c9708725
|
main.py
|
main.py
|
import random
import time
import os
import openpyxl
from util import util
from pyquery import PyQuery
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
|
import time
from util import util
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
|
Change the directory structure
|
Change the directory structure
|
Python
|
apache-2.0
|
firejq/cnki_crawler
|
import random
import time
import os
import openpyxl
from util import util
from pyquery import PyQuery
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
Change the directory structure
|
import time
from util import util
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
|
<commit_before>import random
import time
import os
import openpyxl
from util import util
from pyquery import PyQuery
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
<commit_msg>Change the directory structure<commit_after>
|
import time
from util import util
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
|
import random
import time
import os
import openpyxl
from util import util
from pyquery import PyQuery
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
Change the directory structureimport time
from util import util
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
|
<commit_before>import random
import time
import os
import openpyxl
from util import util
from pyquery import PyQuery
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
<commit_msg>Change the directory structure<commit_after>import time
from util import util
from crawler.scraping import scraping
driver = util.getDriver('phantomjs')
# url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb&action=scdbsearch&db_opt=SCDB'
url = 'http://kns.cnki.net/kns/brief/result.aspx?dbprefix=scdb'
for author in util.get_name_list():
# try:
# driver.get(url=url)
# time.sleep(1)
# # WebDriverWait(driver, 10).until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, '.pageBar_bottom')))
# print('页面基本加载完毕')
# except TimeoutError:
# print('timeout!')
# exit(0)
driver.get(url=url)
time.sleep(1)
scraping(driver=driver, author_name=author)
driver.quit()
print('名单上所有作者的论文信息抓取完毕')
# TODO 去重
# TODO 提高性能--多线程 & 多进程 & 异步IO
|
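The cleanup above reflects the restructure: once the scraping logic lives in its own crawler package, the entry point keeps only what it calls directly, and the selenium/pyquery/openpyxl imports move with the code that uses them. (For reference, the Chinese print at the end announces that scraping finished for every author on the list, and the TODO comments mark deduplication and multi-thread/multi-process/async IO as future work.) The layout implied by the new imports, with file roles inferred rather than shown in the record:
# main.py                 entry point, as in the record
# util/util.py            getDriver(), get_name_list()
# crawler/scraping.py     scraping(driver, author_name)
import time
from util import util
from crawler.scraping import scraping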
51701b35d9ef9401abf0d86fd5726e669326390d
|
scripts/nipy_4dto3D.py
|
scripts/nipy_4dto3D.py
|
#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import sys
import nipy.io.imageformats as nii
if __name__ == '__main__':
try:
fname = sys.argv[1]
except IndexError:
raise OSError('Expecting 4d image filename')
img = nii.load(fname)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(fname)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
|
#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
def main():
# create the parser
parser = argparse.ArgumentParser()
# add the arguments
parser.add_argument('filename', type=str,
help='4D image filename')
# parse the command line
args = parser.parse_args()
img = nii.load(args.filename)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
if __name__ == '__main__':
main()
|
Use argparse for 4D to 3D
|
Use argparse for 4D to 3D
|
Python
|
bsd-3-clause
|
nipy/nipy-labs,arokem/nipy,bthirion/nipy,alexis-roche/register,arokem/nipy,alexis-roche/niseg,bthirion/nipy,alexis-roche/nipy,bthirion/nipy,nipy/nireg,alexis-roche/nireg,nipy/nipy-labs,alexis-roche/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/register,alexis-roche/nireg,nipy/nireg,alexis-roche/register,alexis-roche/niseg,alexis-roche/nipy,arokem/nipy,arokem/nipy
|
#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import sys
import nipy.io.imageformats as nii
if __name__ == '__main__':
try:
fname = sys.argv[1]
except IndexError:
raise OSError('Expecting 4d image filename')
img = nii.load(fname)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(fname)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
Use argparse for 4D to 3D
|
#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
def main():
# create the parser
parser = argparse.ArgumentParser()
# add the arguments
parser.add_argument('filename', type=str,
help='4D image filename')
# parse the command line
args = parser.parse_args()
img = nii.load(args.filename)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import sys
import nipy.io.imageformats as nii
if __name__ == '__main__':
try:
fname = sys.argv[1]
except IndexError:
raise OSError('Expecting 4d image filename')
img = nii.load(fname)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(fname)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
<commit_msg>Use argparse for 4D to 3D<commit_after>
|
#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
def main():
# create the parser
parser = argparse.ArgumentParser()
# add the arguments
parser.add_argument('filename', type=str,
help='4D image filename')
# parse the command line
args = parser.parse_args()
img = nii.load(args.filename)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import sys
import nipy.io.imageformats as nii
if __name__ == '__main__':
try:
fname = sys.argv[1]
except IndexError:
raise OSError('Expecting 4d image filename')
img = nii.load(fname)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(fname)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
Use argparse for 4D to 3D#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
def main():
# create the parser
parser = argparse.ArgumentParser()
# add the arguments
parser.add_argument('filename', type=str,
help='4D image filename')
# parse the command line
args = parser.parse_args()
img = nii.load(args.filename)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import sys
import nipy.io.imageformats as nii
if __name__ == '__main__':
try:
fname = sys.argv[1]
except IndexError:
raise OSError('Expecting 4d image filename')
img = nii.load(fname)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(fname)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
<commit_msg>Use argparse for 4D to 3D<commit_after>#!/usr/bin/env python
''' Tiny script to write 4D files in any format that we read (nifti,
analyze, MINC, at the moment, as nifti 3D files '''
import os
import nipy.externals.argparse as argparse
import nipy.io.imageformats as nii
def main():
# create the parser
parser = argparse.ArgumentParser()
# add the arguments
parser.add_argument('filename', type=str,
help='4D image filename')
# parse the command line
args = parser.parse_args()
img = nii.load(args.filename)
imgs = nii.four_to_three(img)
froot, ext = os.path.splitext(args.filename)
if ext in ('.gz', '.bz2'):
froot, ext = os.path.splitext(froot)
for i, img3d in enumerate(imgs):
fname3d = '%s_%04d.nii' % (froot, i)
nii.save(img3d, fname3d)
if __name__ == '__main__':
main()
|
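The argparse rewrite above replaces bare sys.argv indexing with a declared positional argument, which gives the script a usage message and argument validation for free. A standalone version of the same pattern, runnable as-is by passing argv explicitly:
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('filename', type=str, help='4D image filename')
args = parser.parse_args(['funct.nii.gz'])  # explicit argv for the example
print(args.filename)                        # -> funct.nii.gz
# parser.parse_args([]) would instead print a usage error and exit with status 2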
9feee57aa6f8ebc5c5300d2b611aebdc87367c70
|
selectable/__init__.py
|
selectable/__init__.py
|
"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
|
"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 3,
'micro': 0,
'releaselevel': 'dev',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
|
Change version to reflect current dev status.
|
Change version to reflect current dev status.
|
Python
|
bsd-2-clause
|
mlavin/django-selectable,makinacorpus/django-selectable,affan2/django-selectable,affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,makinacorpus/django-selectable
|
"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
Change version to reflect current dev status.
|
"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 3,
'micro': 0,
'releaselevel': 'dev',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
|
<commit_before>"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
<commit_msg>Change version to reflect current dev status.<commit_after>
|
"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 3,
'micro': 0,
'releaselevel': 'dev',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
|
"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
Change version to reflect current dev status."""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 3,
'micro': 0,
'releaselevel': 'dev',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
|
<commit_before>"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
<commit_msg>Change version to reflect current dev status.<commit_after>"""
django-selectable is a collection of tools and widgets for using/creating auto-complete selection widgets using Django and jQuery UI.
"""
__version_info__ = {
'major': 0,
'minor': 3,
'micro': 0,
'releaselevel': 'dev',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
|
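With the new __version_info__ above, get_version() takes different branches than before: micro is 0 (falsy), so no '.0' segment is appended, and the release level is no longer 'final', so it is. Tracing the function with the new values:
info = {'major': 0, 'minor': 3, 'micro': 0, 'releaselevel': 'dev'}
vers = ["%(major)i.%(minor)i" % info]        # ['0.3']
if info['micro']:                            # 0 is falsy: skipped
    vers.append(".%(micro)i" % info)
if info['releaselevel'] != 'final':          # 'dev': appended
    vers.append('%(releaselevel)s' % info)
print(''.join(vers))                         # -> 0.3dev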
e74acac98e7cb6a5fbdcba43fcca33633b55a5db
|
byceps/util/system.py
|
byceps/util/system.py
|
# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit()
|
# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit(1)
|
Exit with status code 1 if retrieval of the configuration filename from the environment fails
|
Exit with status code 1 if retrieval of the configuration filename from the environment fails
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
|
# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit()
Exit with status code 1 if retrieval of the configuration filename from the environment fails
|
# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit(1)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit()
<commit_msg>Exit with status code 1 if retrieval of the configuration filename from the environment fails<commit_after>
|
# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit(1)
|
# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit()
Exit with status code 1 if retrieval of the configuration filename from the environment fails# -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit(1)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit()
<commit_msg>Exit with status code 1 if retrieval of the configuration filename from the environment fails<commit_after># -*- coding: utf-8 -*-
"""
byceps.util.system
~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import os
import sys
CONFIG_VAR_NAME = 'BYCEPS_CONFIG'
def get_config_filename_from_env():
"""Return the configuration filename set via environment variable.
Raise an exception if it isn't set.
"""
env = os.environ.get(CONFIG_VAR_NAME)
if not env:
raise Exception(
"No configuration file was specified via the '{}' "
"environment variable.".format(CONFIG_VAR_NAME))
return env
def get_config_filename_from_env_or_exit():
"""Return the configuration filename set via environment variable.
Exit if it isn't set.
"""
try:
return get_config_filename_from_env()
except Exception as e:
sys.stderr.write("{}\n".format(e))
sys.exit(1)
|
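A minimal standalone sketch of the same fail-fast pattern, using only the standard library; the variable name APP_CONFIG and the require_env helper are illustrative assumptions, not part of the byceps code above.
import os
import sys
def require_env(name):
    """Return an environment variable's value, exiting with status 1 if it is unset."""
    value = os.environ.get(name)
    if not value:
        sys.stderr.write("Environment variable '{}' is not set.\n".format(name))
        sys.exit(1)  # non-zero exit code, as in the commit above
    return value
if __name__ == '__main__':
    print(require_env('APP_CONFIG'))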
3220b356297ec5fe61888a906543d0ee993f9f31
|
website/tests/test_database.py
|
website/tests/test_database.py
|
import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
|
import database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
|
Add more tests to database
|
Add more tests to database
|
Python
|
lgpl-2.1
|
reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB
|
import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
Add more tests to database
|
import database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
|
<commit_before>import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
<commit_msg>Add more tests to database<commit_after>
|
import database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
|
import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
Add more tests to databaseimport database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
|
<commit_before>import database
def test_encode_csv():
attributes = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
'+', 'R', 'H', 204, 'exon1', 123, False
)
result = database.encode_csv(*attributes)
assert result == '+RH0cc:exon1:7b'
def test_decode_csv():
encoded_csv = '+RH0cc:exon1:7b'
result = database.decode_csv(encoded_csv)
assert result == dict(zip(
('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm'),
('+', 'R', 'H', 68, 204, 'exon1', 123, False)
))
<commit_msg>Add more tests to database<commit_after>import database
def test_encode_csv():
test_data = (
# strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
(('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
(('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
)
for attributes, correct_result in test_data:
result = database.encode_csv(*attributes)
assert result == correct_result
def test_decode_csv():
keys = ('strand', 'ref', 'alt', 'pos', 'cdna_pos', 'exon', 'protein_id', 'is_ptm')
test_data = (
('+RH0cc:exon1:7b', ('+', 'R', 'H', 68, 204, 'exon1', 123, False)),
('-RH1cc:exon1:7b', ('-', 'R', 'H', 68, 204, 'exon1', 123, True)),
)
for encoded_csv, correct_result in test_data:
result = database.decode_csv(encoded_csv)
assert result == dict(zip(keys, correct_result))
|
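The loop-over-tuples style above keeps both cases inside one test function; pytest.mark.parametrize expresses the same table while reporting each case separately. A sketch of the encode test in that style, assuming the same database.encode_csv signature as the commit above:
import pytest
import database
@pytest.mark.parametrize('attributes, expected', [
    # strand, ref, alt, cdna_pos, exon, protein_id, is_ptm
    (('+', 'R', 'H', 204, 'exon1', 123, False), '+RH0cc:exon1:7b'),
    (('-', 'R', 'H', 204, 'exon1', 123, True), '-RH1cc:exon1:7b'),
])
def test_encode_csv_parametrized(attributes, expected):
    assert database.encode_csv(*attributes) == expected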
6398f7ad03aaa8547eaa860ba7ef5d2051ca2955
|
src/newspeak/urls.py
|
src/newspeak/urls.py
|
from django.conf.urls import patterns, url
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
(r'^$', 'django.views.generic.simple.redirect_to',
{'url': '/all/rss/', 'permanent': False}
),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
url(r'^$', RedirectView.as_view(
url='/all/rss/', permanent=False
)),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
|
Use class-based generic view for redirect.
|
Use class-based generic view for redirect.
Fixes #29; legacy method-based generic views are deprecated in Django.
|
Python
|
bsd-3-clause
|
bitsoffreedom/newspeak
|
from django.conf.urls import patterns, url
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
(r'^$', 'django.views.generic.simple.redirect_to',
{'url': '/all/rss/', 'permanent': False}
),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
Use class-based generic view for redirect.
Fixes #29; legacy method-based generic views are deprecated in Django.
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
url(r'^$', RedirectView.as_view(
url='/all/rss/', permanent=False
)),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
|
<commit_before>from django.conf.urls import patterns, url
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
(r'^$', 'django.views.generic.simple.redirect_to',
{'url': '/all/rss/', 'permanent': False}
),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
<commit_msg>Use class-based generic view for redirect.
Fixes #29; legacy method-based generic views are deprecated in Django.<commit_after>
|
from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
url(r'^$', RedirectView.as_view(
url='/all/rss/', permanent=False
)),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
|
from django.conf.urls import patterns, url
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
(r'^$', 'django.views.generic.simple.redirect_to',
{'url': '/all/rss/', 'permanent': False}
),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
Use class-based generic view for redirect.
Fixes #29; legacy method-based generic views are deprecated in Django.from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
url(r'^$', RedirectView.as_view(
url='/all/rss/', permanent=False
)),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
|
<commit_before>from django.conf.urls import patterns, url
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
(r'^$', 'django.views.generic.simple.redirect_to',
{'url': '/all/rss/', 'permanent': False}
),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
<commit_msg>Use class-based generic view for redirect.
Fixes #29; legacy method-based generic views are deprecated in Django.<commit_after>from django.conf.urls import patterns, url
from django.views.generic.base import RedirectView
from surlex.dj import surl
from .feeds import NewspeakRSSFeed, NewspeakAtomFeed
urlpatterns = patterns('',
# surl(r'^$', SomeView.as_view(),
# name='newspeak_home'
# ),)
# Static redirect to the RSS feed, until we have a
# page to show here.
url(r'^$', RedirectView.as_view(
url='/all/rss/', permanent=False
)),
url(r'^all/rss/$', NewspeakRSSFeed(), name='rss_all'),
url(r'^all/atom/$', NewspeakAtomFeed(), name='atom_all'),
)
|
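RedirectView itself survives unchanged in current Django, while patterns() was removed in Django 1.10; a hypothetical modern urls.py carrying the same temporary redirect, shown for contrast:
from django.urls import path
from django.views.generic.base import RedirectView
urlpatterns = [
    # Same non-permanent redirect as the commit above, without patterns()
    path('', RedirectView.as_view(url='/all/rss/', permanent=False)),
]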
bb5748b85829149f8f2f02e139ad2fa7389d38ff
|
examples/download_observations.py
|
examples/download_observations.py
|
from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
|
from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.formats import VisualFormatWriter
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
filename = '%s.txt' % observer_code
with open(filename, 'wb') as fp:
writer = VisualFormatWriter(fp, observer_code)
for observation in observations:
writer.writerow(observation)
print('Observations written to file %s.' % filename)
|
Write downloaded observations into a Visual Format file.
|
Write downloaded observations into a Visual Format file.
|
Python
|
mit
|
zsiciarz/pyaavso
|
from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
Write downloaded observations into a Visual Format file.
|
from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.formats import VisualFormatWriter
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
filename = '%s.txt' % observer_code
with open(filename, 'wb') as fp:
writer = VisualFormatWriter(fp, observer_code)
for observation in observations:
writer.writerow(observation)
print('Observations written to file %s.' % filename)
|
<commit_before>from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
<commit_msg>Write downloaded observations into a Visual Format file.<commit_after>
|
from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.formats import VisualFormatWriter
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
filename = '%s.txt' % observer_code
with open(filename, 'wb') as fp:
writer = VisualFormatWriter(fp, observer_code)
for observation in observations:
writer.writerow(observation)
print('Observations written to file %s.' % filename)
|
from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
Write downloaded observations into a Visual Format file.from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.formats import VisualFormatWriter
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
filename = '%s.txt' % observer_code
with open(filename, 'wb') as fp:
writer = VisualFormatWriter(fp, observer_code)
for observation in observations:
writer.writerow(observation)
print('Observations written to file %s.' % filename)
|
<commit_before>from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
<commit_msg>Write downloaded observations into a Visual Format file.<commit_after>from __future__ import unicode_literals, print_function
import sys
import logging
from pyaavso.formats import VisualFormatWriter
from pyaavso.utils import download_observations
if __name__ == '__main__':
# configure logging so we can see some informational output
logger = logging.getLogger('pyaavso.utils')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
try:
observer_code = sys.argv[1]
except IndexError:
print('Usage: python download_observations.py <OBSERVER_CODE>')
else:
observations = download_observations(observer_code)
print('All done.\nDownloaded %d observations.' % len(observations))
filename = '%s.txt' % observer_code
with open(filename, 'wb') as fp:
writer = VisualFormatWriter(fp, observer_code)
for observation in observations:
writer.writerow(observation)
print('Observations written to file %s.' % filename)
|
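The writer loop in the new code follows the common open-writer-writerow shape; a self-contained sketch of that shape with the standard library's csv.DictWriter standing in for VisualFormatWriter (whose exact API is assumed, not verified here), and an invented observer code for the filename:
import csv
observations = [
    {'name': 'SS CYG', 'date': '2456789.1234', 'magnitude': '8.1'},
]
filename = 'XYZ.txt'  # illustrative observer code
with open(filename, 'w', newline='') as fp:
    # One writer bound to the open file, one writerow() per observation,
    # mirroring the VisualFormatWriter loop in the commit above.
    writer = csv.DictWriter(fp, fieldnames=['name', 'date', 'magnitude'])
    writer.writeheader()
    for observation in observations:
        writer.writerow(observation)
print('Observations written to file %s.' % filename)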
090a11c08839eae78e0ca6ec963b66ac3876ba35
|
circuits/web/events.py
|
circuits/web/events.py
|
# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
|
# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
Move success/failure properties into WebEvent base class.
|
Move success/failure properties into WebEvent base class.
|
Python
|
mit
|
nizox/circuits,eriol/circuits,eriol/circuits,treemo/circuits,eriol/circuits,treemo/circuits,treemo/circuits
|
# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
Move success/failure properties into WebEvent base class.
|
# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
<commit_before># Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
<commit_msg>Move success/failure properties into WebEvent base class.<commit_after>
|
# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
Move success/failure properties into WebEvent base class.# Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
<commit_before># Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
success = True
failure = True
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
success = True
failure = True
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
success = True
failure = True
<commit_msg>Move success/failure properties into WebEvent base class.<commit_after># Module: events
# Date: 3rd February 2009
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Events
This module implements the necessary Events needed.
"""
from circuits import Event
class WebEvent(Event):
channels = ("web",)
success = True
failure = True
class Request(WebEvent):
"""Request(WebEvent) -> Request WebEvent
args: request, response
"""
class Response(WebEvent):
"""Response(WebEvent) -> Response WebEvent
args: request, response
"""
class Stream(WebEvent):
"""Stream(WebEvent) -> Stream WebEvent
args: request, response
"""
|
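The refactor works because Python class attributes are inherited: every subclass of WebEvent now sees success and failure without restating them. A dependency-free illustration of that mechanism, using a plain object base instead of circuits.Event:
class BaseEvent(object):
    channels = ('web',)
    success = True
    failure = True
class RequestEvent(BaseEvent):
    """Inherits channels, success and failure from the base class."""
assert RequestEvent.success is True
assert RequestEvent.failure is True
assert RequestEvent.channels == ('web',)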
3222f720cc46a0e21f2ae5b2a9e8d4695c71a24e
|
changes/api/author_build_index.py
|
changes/api/author_build_index.py
|
from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
Add source to author build index query
|
Add source to author build index query
|
Python
|
apache-2.0
|
dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes
|
from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
Add source to author build index query
|
from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
<commit_msg>Add source to author build index query<commit_after>
|
from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
Add source to author build index queryfrom __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
<commit_msg>Add source to author build index query<commit_after>from __future__ import absolute_import, division, unicode_literals
from flask import session
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
if not session.get('email'):
return
return Author.query.filter_by(email=session['email']).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
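Each joinedload() option folds a relationship into the single SELECT, so adding joinedload('source') stops a per-build lazy load when the source is later serialized. A hedged sketch of the query shape against a hypothetical Build model passed in by the caller (names assumed, not the actual changes schema):
from sqlalchemy.orm import joinedload
def builds_for_author(session, Build, author_id):
    # Hypothetical session/model; the point is the options() shape.
    return (
        session.query(Build)
        .options(
            joinedload(Build.project, innerjoin=True),
            joinedload(Build.author),
            joinedload(Build.source),  # eager-load to avoid N+1 queries
        )
        .filter(Build.author_id == author_id)
        .order_by(Build.date_created.desc())
        .all()
    )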
bf46826e2e81ee071350c69ccc136ccedff330aa
|
UCP/news_event/api.py
|
UCP/news_event/api.py
|
"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(viewsets.ModelViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(viewsets.ModelViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
|
"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
|
Switch from ModelViewSet to mixins
|
Switch from ModelViewSet to mixins
|
Python
|
bsd-3-clause
|
BuildmLearn/University-Campus-Portal-UCP,BuildmLearn/University-Campus-Portal-UCP,BuildmLearn/University-Campus-Portal-UCP
|
"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(viewsets.ModelViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(viewsets.ModelViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
Switch from ModelViewSet to mixins
|
"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
|
<commit_before>"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(viewsets.ModelViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(viewsets.ModelViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
<commit_msg>Switch from ModelViewSet to mixins<commit_after>
|
"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
|
"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(viewsets.ModelViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(viewsets.ModelViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
Switch from ModelViewSet to mixins"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
|
<commit_before>"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(viewsets.ModelViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(viewsets.ModelViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
<commit_msg>Switch from ModelViewSet to mixins<commit_after>"""
API file for news and event app
consists of the news list, detail and add api
events list, detail and add api
"""
from django.utils import timezone
from rest_framework import status, mixins
from rest_framework.authentication import TokenAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from rest_framework import viewsets
from news_event.models import News, Event
from news_event.serializers import NewsSerializer, EventSerializer
class NewsViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = News.objects.all()
serializer_class = NewsSerializer
class EventViewSet(mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
queryset = Event.objects.all()
serializer_class = EventSerializer
|
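Composing GenericViewSet from the List/Retrieve/Create mixins drops the update and destroy actions, so PUT, PATCH and DELETE route to 405 instead of mutating data. A reusable sketch of that composition, assuming only that Django REST framework is installed:
from rest_framework import mixins, viewsets
class ListRetrieveCreateViewSet(mixins.ListModelMixin,
                                mixins.RetrieveModelMixin,
                                mixins.CreateModelMixin,
                                viewsets.GenericViewSet):
    """Base viewset exposing only list, retrieve and create."""
# Usage sketch, reusing the names from the commit above:
# class NewsViewSet(ListRetrieveCreateViewSet):
#     queryset = News.objects.all()
#     serializer_class = NewsSerializer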
43103f59b4409ef15913d0394327d25959721afa
|
bin/trigger_upload.py
|
bin/trigger_upload.py
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
Move logic inside the main function
|
scripts: Move logic inside the main function
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
|
Python
|
agpl-3.0
|
fedora-infra/fedimg,fedora-infra/fedimg
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
scripts: Move logic inside the main function
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
<commit_before>#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
<commit_msg>scripts: Move logic inside the main function
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com><commit_after>
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
scripts: Move logic inside the main function
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
<commit_before>#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
<commit_msg>scripts: Move logic inside the main function
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com><commit_after>#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
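The commit above moves the module-level `get_args()`/`trigger_upload()` calls into a `main()` function behind an `if __name__ == '__main__'` guard, so importing the module no longer fires an upload as a side effect. Note that both the before and after versions carry a latent argument swap: `trigger_upload` is defined as `(compose_id, url, ...)` but called as `(url, compose_id, ...)`. A minimal sketch of the guarded entry point that also sidesteps that class of bug by binding arguments by keyword (the function bodies are illustrative stand-ins, not fedimg's real logic):

#!/usr/bin/env python
"""Sketch: guarded entry point plus keyword binding (illustrative only)."""
import argparse


def trigger_upload(compose_id, url, push_notifications=False):
    # Stand-in body: print instead of performing a real upload.
    print('uploading %s for compose %s (notify=%s)'
          % (url, compose_id, push_notifications))


def get_args():
    parser = argparse.ArgumentParser(description='Trigger a manual upload')
    parser.add_argument('-u', '--url', required=True)
    parser.add_argument('-c', '--compose-id', required=True)
    parser.add_argument('-p', '--push-notifications', action='store_true')
    return parser.parse_args()


def main():
    args = get_args()
    # Keyword arguments make the binding explicit, so a positional swap
    # like trigger_upload(url, compose_id, ...) cannot silently happen.
    trigger_upload(compose_id=args.compose_id, url=args.url,
                   push_notifications=args.push_notifications)


if __name__ == '__main__':
    main()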
7711e9d04d81c4b948599f7454b87274a8f5ad9e
|
src/py3flowtools/flowd_wrapper.py
|
src/py3flowtools/flowd_wrapper.py
|
# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
|
# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
ERR_MSG = 'Could not extract data from {}'
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
raise IOError(ERR_MSG.format(file_path))
line = None
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
else:
if line is None:
raise IOError(ERR_MSG.format(file_path))
|
Fix up error handling for flowd
|
Fix up error handling for flowd
|
Python
|
mit
|
bbayles/py3flowtools
|
# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
Fix up error handling for flowd
|
# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
ERR_MSG = 'Could not extract data from {}'
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
raise IOError(ERR_MSG.format(file_path))
line = None
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
else:
if line is None:
raise IOError(ERR_MSG.format(file_path))
|
<commit_before># flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
<commit_msg>Fix up error handling for flowd<commit_after>
|
# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
ERR_MSG = 'Could not extract data from {}'
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
raise IOError(ERR_MSG.format(file_path))
line = None
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
else:
if line is None:
raise IOError(ERR_MSG.format(file_path))
|
# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
Fix up error handling for flowd# flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
ERR_MSG = 'Could not extract data from {}'
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
raise IOError(ERR_MSG.format(file_path))
line = None
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
else:
if line is None:
raise IOError(ERR_MSG.format(file_path))
|
<commit_before># flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
msg = 'Could not extract data from {}'.format(file_path)
raise IOError(msg)
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
<commit_msg>Fix up error handling for flowd<commit_after># flowtools_wrapper.py
# Copyright 2014 Bo Bayles (bbayles@gmail.com)
# See http://github.com/bbayles/py3flowtools for documentation and license
from __future__ import division, print_function, unicode_literals
import io
import os
import sys
from .flow_line import FlowLine
if sys.version_info.major < 3:
import subprocess32 as subprocess
else:
import subprocess
ERR_MSG = 'Could not extract data from {}'
FLOWD_READER_ARGS = [
'flowd-reader',
'-v', # Verbose output
'-c', # CSV formatting
'-U', # UTC timestamps
'{file_path:}'
]
def FlowLog(file_path):
args = FLOWD_READER_ARGS[:]
args[-1] = file_path
with io.open(os.devnull, mode='wb') as DEVNULL:
with subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=DEVNULL
) as proc:
iterator = iter(proc.stdout.readline, b'')
try:
# Skip the headers
next(iterator)
next(iterator)
except StopIteration:
raise IOError(ERR_MSG.format(file_path))
line = None
for line in iterator:
parsed_line = FlowLine(line)
yield parsed_line
else:
if line is None:
raise IOError(ERR_MSG.format(file_path))
|
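The error handling above leans on Python's `for ... else`: the `else` suite runs only when the loop exits without `break`, and the `line = None` sentinel lets it tell "flowd-reader produced headers but no data rows" apart from a normal end of stream. A self-contained sketch of the same sentinel-plus-`for`/`else` shape, with no flowd or subprocess dependency (it skips one header line rather than the record's two):

def parse_rows(lines):
    """Yield data rows; raise IOError if the input had a header but no data."""
    iterator = iter(lines)
    try:
        next(iterator)  # skip a single header line
    except StopIteration:
        raise IOError('no header found')
    row = None          # sentinel: stays None if the loop body never runs
    for row in iterator:
        yield row.strip()
    else:
        if row is None:
            raise IOError('header present but no data rows')


# list(parse_rows(['header', 'a', 'b'])) -> ['a', 'b']
# list(parse_rows(['header'])) raises IOError once the generator is drained.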
423243bce63da26ca4c5ea784376488ea8997873
|
reg/__init__.py
|
reg/__init__.py
|
# flake8: noqa
#from .registry import Registry, CachingKeyLookup, Lookup
from .dispatch import dispatch, dispatch_method, auto_methodify
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
|
# flake8: noqa
from .dispatch import (dispatch, dispatch_method,
auto_methodify, clean_dispatch_methods)
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
|
Clean dispatch methods exposed to api.
|
Clean dispatch methods exposed to api.
|
Python
|
bsd-3-clause
|
taschini/reg,morepath/reg
|
# flake8: noqa
#from .registry import Registry, CachingKeyLookup, Lookup
from .dispatch import dispatch, dispatch_method, auto_methodify
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
Clean dispatch methods exposed to api.
|
# flake8: noqa
from .dispatch import (dispatch, dispatch_method,
auto_methodify, clean_dispatch_methods)
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
|
<commit_before># flake8: noqa
#from .registry import Registry, CachingKeyLookup, Lookup
from .dispatch import dispatch, dispatch_method, auto_methodify
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
<commit_msg>Clean dispatch methods exposed to api.<commit_after>
|
# flake8: noqa
from .dispatch import (dispatch, dispatch_method,
auto_methodify, clean_dispatch_methods)
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
|
# flake8: noqa
#from .registry import Registry, CachingKeyLookup, Lookup
from .dispatch import dispatch, dispatch_method, auto_methodify
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
Clean dispatch methods exposed to api.# flake8: noqa
from .dispatch import (dispatch, dispatch_method,
auto_methodify, clean_dispatch_methods)
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
|
<commit_before># flake8: noqa
#from .registry import Registry, CachingKeyLookup, Lookup
from .dispatch import dispatch, dispatch_method, auto_methodify
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
<commit_msg>Clean dispatch methods exposed to api.<commit_after># flake8: noqa
from .dispatch import (dispatch, dispatch_method,
auto_methodify, clean_dispatch_methods)
from .mapply import mapply
from .arginfo import arginfo
from .argextract import KeyExtractor
from .sentinel import Sentinel, NOT_FOUND
from .error import RegistrationError, KeyExtractorError
from .predicate import (Predicate, PredicateRegistry, KeyIndex, ClassIndex,
key_predicate, class_predicate,
match_key, match_instance, match_argname,
match_class, CachingKeyLookup, Lookup)
|
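The change is purely to the package's public surface: `reg/__init__.py` re-exports names so callers can write `from reg import dispatch` without knowing the submodule layout, the stale commented-out `registry` import is dropped, and `clean_dispatch_methods` joins the exports. A hedged sketch of the re-export idea, simulated with in-memory modules so it runs as a single file (`mypkg` and everything in it is invented for illustration):

import sys
import types

# Simulate mypkg/core.py, the submodule holding the implementation.
core = types.ModuleType('mypkg.core')


def dispatch(func):      # stand-in for the real decorator
    return func


core.dispatch = dispatch
sys.modules['mypkg.core'] = core

# Simulate mypkg/__init__.py: re-export for a flat public namespace,
# the equivalent of `from .core import dispatch`.
pkg = types.ModuleType('mypkg')
pkg.dispatch = core.dispatch
sys.modules['mypkg'] = pkg

from mypkg import dispatch as public_dispatch
print(public_dispatch is dispatch)   # True: one flat import path for callers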
b706c8130c8f9aa56a78338a078a737fbb8ca28d
|
run.py
|
run.py
|
from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run(debug=True)
|
from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run()
|
Remove debug mode default setting of true
|
Remove debug mode default setting of true
|
Python
|
mit
|
JoshuaOndieki/buckylist,JoshuaOndieki/buckylist
|
from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run(debug=True)
Remove debug mode default setting of true
|
from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run()
|
<commit_before>from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Remove debug mode default setting of true<commit_after>
|
from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run()
|
from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run(debug=True)
Remove debug mode default setting of truefrom bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run()
|
<commit_before>from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Remove debug mode default setting of true<commit_after>from bucky import create_app
app = create_app('development')
if __name__ == "__main__":
app.run()
|
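Dropping the hard-coded `debug=True` matters because Flask's debug mode enables the interactive, code-executing debugger; leaving it on by default is a risk if the server is ever exposed, and the 'development' config passed to `create_app` is the better place to decide. A minimal sketch that opts into debug mode via an environment variable instead (assumes Flask is installed; `APP_DEBUG` is an invented name):

import os

from flask import Flask


def create_app():
    app = Flask(__name__)
    return app


if __name__ == '__main__':
    # Opt in explicitly, e.g.:  APP_DEBUG=1 python run.py
    debug = os.environ.get('APP_DEBUG', '') == '1'
    create_app().run(debug=debug)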
eac0b6cb28e86b43d6459d631f10fd3d7a7b2287
|
cli/hdfs.py
|
cli/hdfs.py
|
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=5):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
|
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=0):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
if count == 0:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null' % locals()
else:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
|
Change default value for line number
|
Change default value for line number
|
Python
|
apache-2.0
|
dongjoon-hyun/tools,dongjoon-hyun/tools,dongjoon-hyun/tools,dongjoon-hyun/tools,dongjoon-hyun/tools
|
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=5):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
Change default value for line number
|
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=0):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
if count == 0:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null' % locals()
else:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
|
<commit_before>#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=5):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
<commit_msg>Change default value for line number<commit_after>
|
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=0):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
if count == 0:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null' % locals()
else:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
|
#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=5):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
Change default value for line number#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=0):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
if count == 0:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null' % locals()
else:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
|
<commit_before>#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=5):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
<commit_msg>Change default value for line number<commit_after>#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
"""
Intelligence Platform CLI Fabric File
"""
__author__ = 'Dongjoon Hyun (dongjoon@apache.org)'
__license__ = 'Apache License'
__version__ = '0.2'
from fabric.api import *
@task
def ls(inpath='/'):
"""
fab hdfs.ls:/sample
"""
cmd = '/usr/bin/hadoop fs -ls %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def count(inpath):
"""
fab hdfs.count:/data/text/newsgroup
"""
cmd = '/usr/bin/hadoop fs -count %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def du(inpath):
"""
fab hdfs.du:/sample
"""
cmd = '/usr/bin/hadoop fs -du -h %(inpath)s 2> /dev/null' % locals()
run(cmd)
@task
def text(inpath, count=0):
"""
fab hdfs.text:/sample/hani_news.head.txt.gz,5
"""
if count == 0:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null' % locals()
else:
cmd = '/usr/bin/hadoop fs -text %(inpath)s 2> /dev/null | head -n %(count)s' % locals()
run(cmd)
|
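With the new `count=0` default, the task streams the whole file and only appends the `head -n` filter when a positive count is requested. One caveat: Fabric 1.x passes command-line task arguments as strings, so the `count == 0` test matches only the default; an explicit `,0` arrives as `'0'` and falls into the `head -n 0` branch. A sketch of the same sentinel logic with explicit coercion, using plain POSIX tools instead of `hadoop fs -text`:

import subprocess
import tempfile


def show_text(path, count=0):
    """Print a file; a positive count limits output to its first lines."""
    count = int(count)  # CLI layers such as Fabric pass strings; normalize
    cmd = 'cat %s' % path
    if count > 0:
        cmd = '%s | head -n %d' % (cmd, count)
    subprocess.call(cmd, shell=True)


if __name__ == '__main__':
    with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
        f.write('one\ntwo\nthree\nfour\n')
    show_text(f.name)        # whole file
    show_text(f.name, '2')   # first two lines; the string arg is coerced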
94c149f950a24a5034082c9b177037307f9ed809
|
hdfs.py
|
hdfs.py
|
import sys, os
import shlex, subprocess
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..."
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
print "Done getting %s..."
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..."
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
print "Done putting %s..."
|
import sys, os
import shlex, subprocess
import time
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..." % (hdfs_path)
start = time.time()
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done getting %s, took %d seconds" % (hdfs_path, end - start)
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..." % (local_path)
start = time.time()
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done putting %s, took %d seconds" % (local_path, end - start)
|
Add some timing and fix debug output from HDFS client.
|
Add some timing and fix debug output from HDFS client.
|
Python
|
mit
|
ms705/napper
|
import sys, os
import shlex, subprocess
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..."
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
print "Done getting %s..."
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..."
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
print "Done putting %s..."
Add some timing and fix debug output from HDFS client.
|
import sys, os
import shlex, subprocess
import time
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..." % (hdfs_path)
start = time.time()
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done getting %s, took %d seconds" % (hdfs_path, end - start)
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..." % (local_path)
start = time.time()
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done putting %s, took %d seconds" % (local_path, end - start)
|
<commit_before>import sys, os
import shlex, subprocess
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..."
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
print "Done getting %s..."
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..."
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
print "Done putting %s..."
<commit_msg>Add some timing and fix debug output from HDFS client.<commit_after>
|
import sys, os
import shlex, subprocess
import time
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..." % (hdfs_path)
start = time.time()
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done getting %s, took %d seconds" % (hdfs_path, end - start)
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..." % (local_path)
start = time.time()
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done putting %s, took %d seconds" % (local_path, end - start)
|
import sys, os
import shlex, subprocess
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..."
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
print "Done getting %s..."
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..."
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
print "Done putting %s..."
Add some timing and fix debug output from HDFS client.import sys, os
import shlex, subprocess
import time
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..." % (hdfs_path)
start = time.time()
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done getting %s, took %d seconds" % (hdfs_path, end - start)
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..." % (local_path)
start = time.time()
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done putting %s, took %d seconds" % (local_path, end - start)
|
<commit_before>import sys, os
import shlex, subprocess
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..."
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
print "Done getting %s..."
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..."
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
print "Done putting %s..."
<commit_msg>Add some timing and fix debug output from HDFS client.<commit_after>import sys, os
import shlex, subprocess
import time
def hdfs_fetch_file(hdfs_path, local_path):
print "Getting %s..." % (hdfs_path)
start = time.time()
command = "hadoop fs -get %s %s" % (hdfs_path, local_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done getting %s, took %d seconds" % (hdfs_path, end - start)
def hdfs_push_file(local_path, hdfs_path):
print "Putting %s..." % (local_path)
start = time.time()
command = "hadoop fs -put %s %s" % (local_path, hdfs_path)
subprocess.call(shlex.split(command))
end = time.time()
print "Done putting %s, took %d seconds" % (local_path, end - start)
|
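Beyond fixing the `print` statements that had format placeholders but no arguments, the commit brackets each Hadoop call with `time.time()` and reports the elapsed seconds. Since the start/stop/report boilerplate is now duplicated in both functions, a decorator is the usual refactor; a minimal Python 3 sketch (the record itself is Python 2):

import functools
import time


def timed(func):
    """Report how long each call to the wrapped function took."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        try:
            return func(*args, **kwargs)
        finally:
            print('%s took %.2f seconds' % (func.__name__,
                                            time.time() - start))
    return wrapper


@timed
def fetch_file(path):
    time.sleep(0.1)   # stand-in for the real `hadoop fs -get` call
    return path


if __name__ == '__main__':
    fetch_file('/tmp/example')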
f94e7db48e6a3fe51d5ccc898d2a1e6de1d101c6
|
urls.py
|
urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
if settings.DEBUG: # otherwise, they should be served by a webserver like apache
urlpatterns += patterns(
'',
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'})
)
|
Test DEBUG to serve static files by django
|
Test DEBUG to serve static files by django
|
Python
|
apache-2.0
|
shirlei/helios-server,shirlei/helios-server,shirlei/helios-server,shirlei/helios-server,shirlei/helios-server
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
Test DEBUG to serve static files by django
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
if settings.DEBUG: # otherwise, they should be served by a webserver like apache
urlpatterns += patterns(
'',
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'})
)
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
<commit_msg>Test DEBUG to serve static files by django<commit_after>
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
if settings.DEBUG: # otherwise, they should be served by a webserver like apache
urlpatterns += patterns(
'',
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'})
)
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
Test DEBUG to serve static files by django# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
if settings.DEBUG: # otherwise, they should be served by a webserver like apache
urlpatterns += patterns(
'',
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'})
)
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'}),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
<commit_msg>Test DEBUG to serve static files by django<commit_after># -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
(r'^auth/', include('helios_auth.urls')),
(r'^helios/', include('helios.urls')),
(r'^', include('server_ui.urls')),
(r'^admin/', include(admin.site.urls))
)
if settings.DEBUG: # otherwise, they should be served by a webserver like apache
urlpatterns += patterns(
'',
# SHOULD BE REPLACED BY APACHE STATIC PATH
(r'booth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosbooth'}),
(r'verifier/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/heliosverifier'}),
(r'static/auth/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios_auth/media'}),
(r'static/helios/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/helios/media'}),
(r'static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.ROOT_PATH + '/server_ui/media'})
)
|
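Gating the static routes on `settings.DEBUG` keeps Django's pure-Python `django.views.static.serve` out of production, where the commit's own comment says Apache should take over; note the appended patterns now sit after the `r'^'` catch-all, so they only match when `server_ui.urls` does not already claim those prefixes. On current Django versions the same idea is usually written with the `static()` helper, which itself returns nothing unless `DEBUG` is set; a hedged sketch for a modern project (route names illustrative, requires a configured Django settings module):

from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path

urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('server_ui.urls')),   # app catch-all
]

if settings.DEBUG:
    # Dev server only; a real webserver serves these paths in production.
    urlpatterns += static(settings.STATIC_URL,
                          document_root=settings.STATIC_ROOT)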
ee362795318507b757795e0be4c45d68c17cd28f
|
roll.py
|
roll.py
|
#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
return randrange(1, int(diefaces + 1))
|
#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# roll v1.0
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
assert isinstance(diefaces, int) and diefaces >= 1
return randrange(1, int(diefaces + 1))
|
Add assert to prevent invalid input
|
Add assert to prevent invalid input
|
Python
|
agpl-3.0
|
TechnologyClassroom/dice-mechanic-sim,TechnologyClassroom/dice-mechanic-sim
|
#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
return randrange(1, int(diefaces + 1))
Add assert to prevent invalid input
|
#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# roll v1.0
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
assert isinstance(diefaces, int) and diefaces >= 1
return randrange(1, int(diefaces + 1))
|
<commit_before>#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
return randrange(1, int(diefaces + 1))
<commit_msg>Add assert to prevent invalid input<commit_after>
|
#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# roll v1.0
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
assert isinstance(diefaces, int) and diefaces >= 1
return randrange(1, int(diefaces + 1))
|
#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
return randrange(1, int(diefaces + 1))
Add assert to prevent invalid input#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# roll v1.0
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
assert isinstance(diefaces, int) and diefaces >= 1
return randrange(1, int(diefaces + 1))
|
<commit_before>#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
return randrange(1, int(diefaces + 1))
<commit_msg>Add assert to prevent invalid input<commit_after>#!/usr/bin/env python
"""roll simulates rolling polyhedral dice."""
# roll.py
# roll v1.0
# Michael McMahon
from random import randrange
# Die roll function
# This function rolls polyhedral dice. Example: To roll a d8, use roll(8).
def roll(diefaces):
"""Simulate rolling polyhedral dice"""
assert isinstance(diefaces, int) and diefaces >= 1
return randrange(1, int(diefaces + 1))
|
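The new `assert` rejects non-integer or sub-1 face counts before `randrange` sees them, but assertions are stripped when Python runs with `-O`, so they guard development rather than untrusted input. A drop-in style sketch that raises an explicit `ValueError` instead, which survives optimized mode (this is a variant, not the project's actual code):

from random import randrange


def roll(diefaces):
    """Simulate rolling one die with `diefaces` faces."""
    if not isinstance(diefaces, int) or diefaces < 1:
        raise ValueError('diefaces must be a positive integer, got %r'
                         % (diefaces,))
    return randrange(1, diefaces + 1)


if __name__ == '__main__':
    print(roll(8))   # a d8: uniform over 1..8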
f7b592d44bd6586cea34ff7262a874142802fb84
|
Python/setup.py
|
Python/setup.py
|
"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='0.3',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
|
"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='3.2',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
|
Make qipype and QUIT version numbers match
|
Make qipype and QUIT version numbers match
|
Python
|
mpl-2.0
|
spinicist/QUIT,spinicist/QUIT,spinicist/QUIT,spinicist/QUIT
|
"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='0.3',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
Make qipype and QUIT version numbers match
|
"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='3.2',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
|
<commit_before>"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='0.3',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
<commit_msg>Make qipype and QUIT version numbers match<commit_after>
|
"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='3.2',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
|
"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='0.3',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
Make qipype and QUIT version numbers match"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='3.2',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
|
<commit_before>"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='0.3',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
<commit_msg>Make qipype and QUIT version numbers match<commit_after>"""Setup file to Not Another Neuroimaging Slicer
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.abspath(path.join(here, 'README.md'))) as f:
long_description = f.read()
setup(
name='qipype',
version='3.2',
description='nipype interfaces to QUantitative Imaging Tools',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/spinicist/quit',
author='Tobias Wood',
author_email='tobias@spinicist.org.uk',
py_modules=['qipype'],
install_requires=['nipype>=1.2.3',
'nibabel>=2.5.1'],
python_requires='>=3',
license='MPL',
classifiers=['Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 3',
],
keywords='neuroimaging mri',
packages=find_packages()
)
|
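The fix above settles the drift by hand-editing one of two version strings. A minimal sketch of keeping the numbers in step automatically, assuming (hypothetically; this is not how the QUIT repository is known to be laid out) that the parent project declares its version in a CMakeLists.txt that setup.py can parse:
import re
from pathlib import Path
def shared_version(cmake_file='CMakeLists.txt'):
    # Hypothetical single source of truth: parse 'VERSION x.y' as declared
    # by the C++ side, so setup.py never hard-codes the string again.
    text = Path(cmake_file).read_text()
    match = re.search(r'VERSION\s+(\d+(?:\.\d+)+)', text)
    if match is None:
        raise ValueError('no VERSION field in %s' % cmake_file)
    return match.group(1)
setup(version=shared_version(), ...) would then track the native tools without a manual bump commit.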
478f651184a84fae36959e8dfa1fa1c9ebb01f09
|
onepercentclub/settings/travis.py
|
onepercentclub/settings/travis.py
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
Use FF as test browser in Travis
|
Use FF as test browser in Travis
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
Use FF as test browser in Travis
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
<commit_before># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
<commit_msg>Use FF as test browser in Travis<commit_after>
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
Use FF as test browser in Travis# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
<commit_before># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
<commit_msg>Use FF as test browser in Travis<commit_after># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
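Hard-coding the driver name means a commit for every environment flip (remote in one setup, firefox in another). A hedged sketch, standard library only, of reading the same setting from the environment with the commit's value as the local fallback:
import os
# Local runs fall back to Firefox; CI exports SELENIUM_WEBDRIVER=remote
# instead of editing the settings module.
SELENIUM_WEBDRIVER = os.environ.get('SELENIUM_WEBDRIVER', 'firefox')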
59c29cbf7f41221b412253ec1f0444496bd934fa
|
tube.py
|
tube.py
|
"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
"""Run test cases in the tests/ directory."""
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
Remove dirty lies from docstring
|
Remove dirty lies from docstring
This was a leftover from wherever I originally copied this config from.
|
Python
|
mit
|
blaix/tdubs
|
"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
"""Run test cases in the tests/ directory."""
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
Remove dirty lies from docstring
This was a leftover from wherever I originally copied this config from.
|
"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
<commit_before>"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
"""Run test cases in the tests/ directory."""
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
<commit_msg>Remove dirty lies from docstring
This was a leftover from wherever I originally copied this config from.<commit_after>
|
"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
"""Run test cases in the tests/ directory."""
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
Remove dirty lies from docstring
This was a leftover from wherever I originally copied this config from."""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
<commit_before>"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
"""Run test cases in the tests/ directory."""
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
<commit_msg>Remove dirty lies from docstring
This was a leftover from wherever I originally copied this config from.<commit_after>"""Configuration for testtube.
Automatically run tests when files change by running: stir
See: https://github.com/thomasw/testtube
For flake8, don't forget to install:
* flake8-quotes
"""
from testtube.helpers import Flake8, Helper, Nosetests
class ScreenClearer(Helper):
command = 'clear'
def success(self, *args):
pass
class Isort(Helper):
command = 'isort'
def get_args(self):
return ['--check']
class UnitTests(Nosetests):
def get_args(self, *args, **kwargs):
return ['-x', '--with-doctest', '--doctest-options=+ELLIPSIS',
'--doctest-extension=rst']
clear = ScreenClearer(all_files=True)
lint_style = Flake8(all_files=True)
unit_tests = UnitTests(all_files=True)
PATTERNS = (
(r'.*\.(py|rst)$', [clear, unit_tests], {'fail_fast': True}),
(r'.*\.py$', [lint_style], {'fail_fast': True}),
)
|
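The config above shows testtube's Helper contract: subclass Helper, set a command, override get_args. A sketch of one more helper in that style; the class is illustrative, is not part of the tdubs repo, and would still need its own entry in PATTERNS to run:
from testtube.helpers import Helper
class IsortFix(Helper):
    """Rewrite import order in place rather than just checking it."""
    command = 'isort'
    def get_args(self, *args, **kwargs):
        # Without '--check', isort edits the offending files directly.
        return []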
2c6ff2b65ea291816221fe996fb282c2c4a74dd7
|
install_steps/create_bosh_cert.py
|
install_steps/create_bosh_cert.py
|
def do_step(context):
call("mkdir -p ./bosh", shell=True)
call("mkdir -p ./bosh/manifests", shell=True)
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
call("cp bosh.key ./bosh/bosh", shell=True)
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
|
from subprocess import call
from os import makedirs
from shutil import copy
def do_step(context):
makedirs("bosh/manifests")
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
copy("bosh.key", "./bosh/bosh")
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
|
Use python libs to do file operations
|
Use python libs to do file operations
|
Python
|
apache-2.0
|
cf-platform-eng/bosh-azure-template,cf-platform-eng/bosh-azure-template
|
def do_step(context):
call("mkdir -p ./bosh", shell=True)
call("mkdir -p ./bosh/manifests", shell=True)
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
call("cp bosh.key ./bosh/bosh", shell=True)
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
Use python libs to do file operations
|
from subprocess import call
from os import makedirs
from shutil import copy
def do_step(context):
makedirs("bosh/manifests")
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
copy("bosh.key", "./bosh/bosh")
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
|
<commit_before>def do_step(context):
call("mkdir -p ./bosh", shell=True)
call("mkdir -p ./bosh/manifests", shell=True)
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
call("cp bosh.key ./bosh/bosh", shell=True)
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
<commit_msg>Use python libs to do file operations<commit_after>
|
from subprocess import call
from os import makedirs
from shutil import copy
def do_step(context):
makedirs("bosh/manifests")
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
copy("bosh.key", "./bosh/bosh")
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
|
def do_step(context):
call("mkdir -p ./bosh", shell=True)
call("mkdir -p ./bosh/manifests", shell=True)
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
call("cp bosh.key ./bosh/bosh", shell=True)
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
Use python libs to do file operationsfrom subprocess import call
from os import makedirs
from shutil import copy
def do_step(context):
makedirs("bosh/manifests")
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
copy("bosh.key", "./bosh/bosh")
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
|
<commit_before>def do_step(context):
call("mkdir -p ./bosh", shell=True)
call("mkdir -p ./bosh/manifests", shell=True)
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
call("cp bosh.key ./bosh/bosh", shell=True)
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
<commit_msg>Use python libs to do file operations<commit_after>from subprocess import call
from os import makedirs
from shutil import copy
def do_step(context):
makedirs("bosh/manifests")
# Generate the private key and certificate
call("sh create_cert.sh", shell=True)
copy("bosh.key", "./bosh/bosh")
with open ('bosh_cert.pem', 'r') as tmpfile:
ssh_cert = tmpfile.read()
ssh_cert = "|\n" + ssh_cert
ssh_cert="\n ".join([line for line in ssh_cert.split('\n')])
context.meta['settings']['SSH_CERTIFICATE'] = ssh_cert
return context
|
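One behavioural change hides in this rewrite: the shell's mkdir -p is idempotent, while os.makedirs raises OSError when the tree already exists, so the step above fails on a second run. A hedged sketch of a rerun-safe equivalent (exist_ok needs Python 3.2+; the function name is illustrative):
import os
import shutil
def ensure_copied(src, dest):
    # exist_ok restores the `mkdir -p` semantics the shell version had.
    os.makedirs(os.path.dirname(dest) or '.', exist_ok=True)
    shutil.copy(src, dest)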
91d4c3a27002f62fbdd949e40dce96d58bdf5394
|
billjobs/permissions.py
|
billjobs/permissions.py
|
from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Define custom permission for UserAPI and UserDetailAPI
GET : only accessible by admin
POST: public
"""
def has_permission(self, request, view):
if request.method == 'GET':
return request.user and request.user.is_staff
elif request.method == 'POST':
return True
return True
|
Write permission for user API, make POST public
|
Write permission for user API, make POST public
|
Python
|
mit
|
ioO/billjobs
|
Write permission for user API, make POST public
|
from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Define custom permission for UserAPI and UserDetailAPI
GET : only accessible by admin
POST: public
"""
def has_permission(self, request, view):
if request.method == 'GET':
return request.user and request.user.is_staff
elif request.method == 'POST':
return True
return True
|
<commit_before><commit_msg>Write permission for user API, make POST public<commit_after>
|
from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Define custom permission for UserAPI and UserDetailAPI
GET : only accessible by admin
POST: public
"""
def has_permission(self, request, view):
if request.method == 'GET':
return request.user and request.user.is_staff
elif request.method == 'POST':
return True
return True
|
Write permission for user API, make POST publicfrom rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Define custom permission for UserAPI and UserDetailAPI
GET : only accessible by admin
POST: public
"""
def has_permission(self, request, view):
if request.method == 'GET':
return request.user and request.user.is_staff
elif request.method == 'POST':
return True
return True
|
<commit_before><commit_msg>Write permission for user API, make POST public<commit_after>from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Define custom permission for UserAPI and UserDetailAPI
GET : only accessible by admin
POST: public
"""
def has_permission(self, request, view):
if request.method == 'GET':
return request.user and request.user.is_staff
elif request.method == 'POST':
return True
return True
|
|
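Worth noting about the class above: the trailing return True also opens every verb other than GET and POST (PUT, DELETE, PATCH) to anonymous callers at the permission layer. A hedged variant using DRF's SAFE_METHODS constant; the class name is illustrative, not part of billjobs:
from rest_framework import permissions
class AdminReadPublicCreate(permissions.BasePermission):
    """Sketch: admins may read, anyone may POST, all other verbs are denied."""
    def has_permission(self, request, view):
        if request.method == 'POST':
            return True
        if request.method in permissions.SAFE_METHODS:
            return bool(request.user and request.user.is_staff)
        return False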
21f3d1957258f87e45ddcb39a17ecf2143c203b0
|
kindred/pycorenlp.py
|
kindred/pycorenlp.py
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
def __init__(self, server_url):
self.server_url = server_url
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string"
assert isinstance(properties, dict)
data = text.encode('utf8')
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
useSessions = True
sessions = {}
def __init__(self, server_url):
self.server_url = server_url
if StanfordCoreNLP.useSessions:
if not server_url in StanfordCoreNLP.sessions:
StanfordCoreNLP.sessions[server_url] = requests.Session()
self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
#print('X',text)
data = text.encode('utf8')
if StanfordCoreNLP.useSessions:
r = self.session.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
else:
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
Add experimental sessions code for CoreNLP requests
|
Add experimental sessions code for CoreNLP requests
|
Python
|
mit
|
jakelever/kindred,jakelever/kindred
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
def __init__(self, server_url):
self.server_url = server_url
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string"
assert isinstance(properties, dict)
data = text.encode('utf8')
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
Add experimental sessions code for CoreNLP requests
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
useSessions = True
sessions = {}
def __init__(self, server_url):
self.server_url = server_url
if StanfordCoreNLP.useSessions:
if not server_url in StanfordCoreNLP.sessions:
StanfordCoreNLP.sessions[server_url] = requests.Session()
self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
#print('X',text)
data = text.encode('utf8')
if StanfordCoreNLP.useSessions:
r = self.session.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
else:
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
<commit_before># Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
def __init__(self, server_url):
self.server_url = server_url
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string"
assert isinstance(properties, dict)
data = text.encode('utf8')
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
<commit_msg>Add experimental sessions code for CoreNLP requests<commit_after>
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
useSessions = True
sessions = {}
def __init__(self, server_url):
self.server_url = server_url
if StanfordCoreNLP.useSessions:
if not server_url in StanfordCoreNLP.sessions:
StanfordCoreNLP.sessions[server_url] = requests.Session()
self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
#print('X',text)
data = text.encode('utf8')
if StanfordCoreNLP.useSessions:
r = self.session.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
else:
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
def __init__(self, server_url):
self.server_url = server_url
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string"
assert isinstance(properties, dict)
data = text.encode('utf8')
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
Add experimental sessions code for CoreNLP requests# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
useSessions = True
sessions = {}
def __init__(self, server_url):
self.server_url = server_url
if StanfordCoreNLP.useSessions:
if not server_url in StanfordCoreNLP.sessions:
StanfordCoreNLP.sessions[server_url] = requests.Session()
self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
#print('X',text)
data = text.encode('utf8')
if StanfordCoreNLP.useSessions:
r = self.session.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
else:
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
<commit_before># Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
def __init__(self, server_url):
self.server_url = server_url
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string"
assert isinstance(properties, dict)
data = text.encode('utf8')
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
<commit_msg>Add experimental sessions code for CoreNLP requests<commit_after># Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
useSessions = True
sessions = {}
def __init__(self, server_url):
self.server_url = server_url
if StanfordCoreNLP.useSessions:
if not server_url in StanfordCoreNLP.sessions:
StanfordCoreNLP.sessions[server_url] = requests.Session()
self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
#print('X',text)
data = text.encode('utf8')
if StanfordCoreNLP.useSessions:
r = self.session.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
else:
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
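The heart of the change is the class-level dict: every StanfordCoreNLP instance pointed at the same URL shares one requests.Session, letting urllib3 pool TCP connections across many short annotate() calls. A stripped-down sketch of that cache; note that the 'Connection: close' header the code still sends largely cancels the keep-alive benefit, so dropping it would be the natural follow-up:
import requests
class SessionCache(object):
    # One shared Session per server URL, process-wide.
    _sessions = {}
    @classmethod
    def get(cls, url):
        if url not in cls._sessions:
            cls._sessions[url] = requests.Session()
        return cls._sessions[url]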
f3d911eb4aef1a25e173874e52bb17e0f3fdd660
|
indra/db/belief.py
|
indra/db/belief.py
|
class MockStatement(object):
def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, evidence, mk_hash, supports):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
Allow evidence to be a list or a single instance.
|
Allow evidence to be a list or a single instance.
|
Python
|
bsd-2-clause
|
pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,johnbachman/belpy,bgyori/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,johnbachman/indra,johnbachman/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/indra
|
class MockStatement(object):
def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
Allow evidence to be a list or a single instance.
|
class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, evidence, mk_hash, supports):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
<commit_before>class MockStatement(object):
def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
<commit_msg>Allow evidence to be a list or a single instance.<commit_after>
|
class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, evidence, mk_hash, supports):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
class MockStatement(object):
def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
Allow evidence to be a list or a single instance.class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, evidence, mk_hash, supports):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
<commit_before>class MockStatement(object):
def __init__(self, evidence, mk_hash, supports):
self.evidence = evidence
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
<commit_msg>Allow evidence to be a list or a single instance.<commit_after>class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, evidence, mk_hash, supports):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
self.supports = supports
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
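The __init__ change is the usual accept-one-or-many idiom. Factored out, it is a one-liner; like the commit's isinstance check, it deliberately wraps tuples and other iterables as single items:
def as_list(value):
    """Return value unchanged if it is already a list, else wrap it."""
    return value if isinstance(value, list) else [value]
self.evidence = as_list(evidence) would then keep the constructor flat.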
539f78c8ea4ca1692ae27a2d0bdc01004b5ad471
|
examples/plot_humidity.py
|
examples/plot_humidity.py
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
Modify example to make camera vs. outside humidity plot
|
Modify example to make camera vs. outside humidity plot
|
Python
|
mit
|
fact-project/aux2mongodb
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
Modify example to make camera vs. outside humidity plot
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
<commit_before>import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
<commit_msg>Modify example to make camera vs. outside humidity plot<commit_after>
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
Modify example to make camera vs. outside humidity plotimport matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
<commit_before>import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather
from datetime import date
m = MagicWeather(auxdir='/fact/aux')
df = m.read_date(date(2015, 12, 31))
df.plot(x='timestamp', y='humidity', legend=False)
plt.ylabel('Humidity / %')
plt.show()
<commit_msg>Modify example to make camera vs. outside humidity plot<commit_after>import matplotlib.pyplot as plt
from aux2mongodb import MagicWeather, PfMini
import pandas as pd
from tqdm import tqdm
import datetime
plt.style.use('ggplot')
magic_weather = MagicWeather(auxdir='/fact/aux')
pf_mini = PfMini(auxdir='/fact/aux')
dates = pd.date_range('2015-10-20', datetime.date.today())
outside = pd.DataFrame()
camera = pd.DataFrame()
for d in tqdm(dates):
try:
outside = outside.append(magic_weather.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
try:
camera = camera.append(pf_mini.read_date(d), ignore_index=True)
except FileNotFoundError:
continue
outside.set_index('timestamp', inplace=True)
camera.set_index('timestamp', inplace=True)
outside = outside.resample('24h').mean()
camera = camera.resample('24h').mean()
fig, ax = plt.subplots()
ax.set_title('Camera vs. Outside Humidity (24h mean)')
outside.plot(y='humidity', legend=False, label='Outside', ax=ax)
camera.plot(y='humidity', legend=False, label='In Camera', ax=ax)
ax.legend()
ax.set_ylabel('Humidity / %')
fig.tight_layout()
plt.show()
|
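Calling DataFrame.append inside a loop copies the whole frame on every pass (the method was later deprecated in pandas 1.4). A hedged sketch of the collect-then-concat pattern, assuming only an object with the read_date interface used above:
import pandas as pd
def collect(reader, dates):
    # One frame per date; dates whose aux file is missing are skipped;
    # concatenate once at the end instead of appending in the loop.
    frames = []
    for d in dates:
        try:
            frames.append(reader.read_date(d))
        except FileNotFoundError:
            continue
    if not frames:
        return pd.DataFrame()
    return pd.concat(frames, ignore_index=True)
outside = collect(magic_weather, dates) would replace the first try/except loop.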
58e5a7e332015e05498edf0f4012fc6b817b99b9
|
longclaw/longclawproducts/tests.py
|
longclaw/longclawproducts/tests.py
|
from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
|
from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
def test_product_title(self):
variant = ProductVariantFactory()
self.assertEqual(variant.get_product_title(), variant.product.title)
|
Add a test for product title
|
Add a test for product title
|
Python
|
mit
|
JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw
|
from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
Add a test for product title
|
from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
def test_product_title(self):
variant = ProductVariantFactory()
self.assertEqual(variant.get_product_title(), variant.product.title)
|
<commit_before>from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
<commit_msg>Add a test for product title<commit_after>
|
from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
def test_product_title(self):
variant = ProductVariantFactory()
self.assertEqual(variant.get_product_title(), variant.product.title)
|
from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
Add a test for product titlefrom wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
def test_product_title(self):
variant = ProductVariantFactory()
self.assertEqual(variant.get_product_title(), variant.product.title)
|
<commit_before>from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
<commit_msg>Add a test for product title<commit_after>from wagtail.tests.utils import WagtailPageTests
from longclaw.utils import maybe_get_product_model
from longclaw.tests.products.models import ProductIndex
from longclaw.tests.utils import ProductVariantFactory
from longclaw.longclawproducts.serializers import ProductVariantSerializer
class TestProducts(WagtailPageTests):
def setUp(self):
self.product_model = maybe_get_product_model()
def test_can_create_product(self):
self.assertCanCreateAt(ProductIndex, self.product_model)
def test_variant_price(self):
variant = ProductVariantFactory()
self.assertTrue(variant.price > 0)
def test_price_range(self):
variant = ProductVariantFactory()
prices = variant.product.price_range
self.assertTrue(prices[0] == prices[1])
def test_stock(self):
variant = ProductVariantFactory()
variant.stock = 1
variant.save()
self.assertTrue(variant.product.in_stock)
def test_out_of_stock(self):
variant = ProductVariantFactory()
variant.stock = 0
variant.save()
self.assertFalse(variant.product.in_stock)
def test_variant_serializer(self):
variant = ProductVariantFactory()
serializer = ProductVariantSerializer(variant)
self.assertIn('product', serializer.data)
def test_product_title(self):
variant = ProductVariantFactory()
self.assertEqual(variant.get_product_title(), variant.product.title)
|
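The new test pins down a convenience accessor: get_product_title() on a variant should return exactly the parent product page's title. A sketch of the implementation the assertion implies (the field names here are assumptions; only the title proxying is established by the test itself):

from django.db import models

class ProductVariant(models.Model):
    """Sketch only: the accessor the new test exercises."""
    # `product` is assumed to be the relation back to the product page.
    def get_product_title(self):
        return self.product.title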
55582454e9321f6d5cfd458261e6ce7401f4b36a
|
ocradmin/lib/nodetree/test_nodes.py
|
ocradmin/lib/nodetree/test_nodes.py
|
"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
_parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
_parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
|
"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
|
Fix parameters on test nodes
|
Fix parameters on test nodes
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
_parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
_parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
Fix parameters on test nodes
|
"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
|
<commit_before>"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
_parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
_parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
<commit_msg>Fix parameters on test nodes<commit_after>
|
"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
|
"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
_parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
_parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
Fix parameters on test nodes"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
|
<commit_before>"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
_parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
_parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
<commit_msg>Fix parameters on test nodes<commit_after>"""
Nodetree test nodes.
"""
from __future__ import absolute_import
import types
from . import node
class Number(node.Node):
"""A number constant."""
intypes = []
outtype = types.IntType
parameters = [
dict(name="num", value=0),
]
def _eval(self):
return self._params.get("num")
class Arithmetic(node.Node):
"""Operate on two numbers"""
intypes = [types.IntType, types.IntType]
outtype = types.IntType
parameters = [
dict(name="operator", value="+", choices=[
"+", "-", "*", "/",
]),
]
def _eval(self):
op = self._params.get("operator")
lhs = self.eval_input(0)
rhs = self.eval_input(1)
if op == "+":
return lhs + rhs
elif op == "-":
return lhs - rhs
elif op == "*":
return lhs * rhs
elif op == "/":
return lhs / rhs
|
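The rename from _parameters to parameters suggests the node base class looks up a public class attribute when seeding per-instance values; an underscore-prefixed name would simply never be found. A sketch of how such a base class might consume the attribute (the real nodetree.node.Node is not shown in this record, so treat the details as assumptions):

class Node(object):
    parameters = []  # subclasses declare dicts like {"name": ..., "value": ...}

    def __init__(self):
        # Seed instance parameters from the public class-level declarations.
        self._params = dict((p["name"], p["value"]) for p in self.parameters)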
e0c9f12463f1e4cc17eefbf8909118604695a23d
|
oscar/apps/search/search_indexes.py
|
oscar/apps/search/search_indexes.py
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
Fix issue with latest changes in haystack
|
Fix issue with latest changes in haystack
A merge into haystack has resulted in an additional argument for
``index_queryset``; I updated the search index for Oscar's product
to fix the issue.
|
Python
|
bsd-3-clause
|
MatthewWilkes/django-oscar,jmt4/django-oscar,michaelkuty/django-oscar,jlmadurga/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,makielab/django-oscar,Bogh/django-oscar,jinnykoo/christmas,WadeYuChen/django-oscar,faratro/django-oscar,eddiep1101/django-oscar,jinnykoo/christmas,itbabu/django-oscar,john-parton/django-oscar,WillisXChen/django-oscar,pasqualguerrero/django-oscar,mexeniz/django-oscar,dongguangming/django-oscar,binarydud/django-oscar,django-oscar/django-oscar,bnprk/django-oscar,kapt/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,ka7eh/django-oscar,MatthewWilkes/django-oscar,jinnykoo/wuyisj,itbabu/django-oscar,elliotthill/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,itbabu/django-oscar,pasqualguerrero/django-oscar,nickpack/django-oscar,bschuon/django-oscar,pasqualguerrero/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,nickpack/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,jinnykoo/wuyisj,ahmetdaglarbas/e-commerce,WadeYuChen/django-oscar,machtfit/django-oscar,anentropic/django-oscar,eddiep1101/django-oscar,michaelkuty/django-oscar,saadatqadri/django-oscar,dongguangming/django-oscar,lijoantony/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,amirrpp/django-oscar,pdonadeo/django-oscar,lijoantony/django-oscar,mexeniz/django-oscar,marcoantoniooliveira/labweb,Jannes123/django-oscar,WillisXChen/django-oscar,kapari/django-oscar,bschuon/django-oscar,makielab/django-oscar,dongguangming/django-oscar,marcoantoniooliveira/labweb,solarissmoke/django-oscar,rocopartners/django-oscar,DrOctogon/unwash_ecom,spartonia/django-oscar,makielab/django-oscar,taedori81/django-oscar,Idematica/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,john-parton/django-oscar,okfish/django-oscar,elliotthill/django-oscar,Idematica/django-oscar,okfish/django-oscar,faratro/django-oscar,john-parton/django-oscar,manevant/django-oscar,makielab/django-oscar,pasqualguerrero/django-oscar,monikasulik/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,faratro/django-oscar,josesanch/django-oscar,kapari/django-oscar,WillisXChen/django-oscar,eddiep1101/django-oscar,jlmadurga/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,Jannes123/django-oscar,manevant/django-oscar,rocopartners/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj.com,jmt4/django-oscar,jmt4/django-oscar,elliotthill/django-oscar,sonofatailor/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,QLGu/django-oscar,solarissmoke/django-oscar,adamend/django-oscar,pdonadeo/django-oscar,sasha0/django-oscar,manevant/django-oscar,monikasulik/django-oscar,thechampanurag/django-oscar,anentropic/django-oscar,lijoantony/django-oscar,marcoantoniooliveira/labweb,anentropic/django-oscar,machtfit/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,adamend/django-oscar,ademuk/django-oscar,machtfit/django-oscar,ademuk/django-oscar,vovanbo/django-oscar,ahmetdaglarbas/e-commerce,ka7eh/django-oscar,nfletton/django-oscar,saadatqadri/django-oscar,michaelkuty/django-oscar,jmt4/django-oscar,jinnykoo/christmas,spartonia/django-oscar,adamend/django-oscar,nfletton/django-oscar,faratro/django-oscar,QLGu/django-oscar,ahmetdaglarbas/e-commerce,WillisXChen/django-oscar,bnprk/django-oscar,sasha0/django-oscar,bnprk/django-oscar,vovanbo/django-oscar,mexeniz/django-oscar,kapari/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,mexeniz/django-oscar,binarydud/django-oscar,adame
nd/django-oscar,Bogh/django-oscar,kapt/django-oscar,thechampanurag/django-oscar,QLGu/django-oscar,taedori81/django-oscar,ka7eh/django-oscar,Bogh/django-oscar,ademuk/django-oscar,jinnykoo/wuyisj.com,josesanch/django-oscar,bschuon/django-oscar,taedori81/django-oscar,monikasulik/django-oscar,spartonia/django-oscar,pdonadeo/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,monikasulik/django-oscar,okfish/django-oscar,pdonadeo/django-oscar,jinnykoo/wuyisj,sasha0/django-oscar,ademuk/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,vovanbo/django-oscar,dongguangming/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,nickpack/django-oscar,michaelkuty/django-oscar,QLGu/django-oscar,Idematica/django-oscar,DrOctogon/unwash_ecom,django-oscar/django-oscar,manevant/django-oscar,binarydud/django-oscar,thechampanurag/django-oscar,bschuon/django-oscar,nickpack/django-oscar,john-parton/django-oscar,eddiep1101/django-oscar,MatthewWilkes/django-oscar,saadatqadri/django-oscar,taedori81/django-oscar,kapt/django-oscar,Bogh/django-oscar,thechampanurag/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,jlmadurga/django-oscar,Jannes123/django-oscar
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
Fix issue with latest changes in haystack
A merge into haystack has resulted in an additional argument for
``index_queryset``, I updated the search index for Oscar's product
to fix the issue.
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
<commit_before>from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
<commit_msg>Fix issue with latest changes in haystack
A merge into haystack has resulted in an additional argument for
``index_queryset``; I updated the search index for Oscar's product
to fix the issue.<commit_after>
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
Fix issue with latest changes in haystack
A merge into haystack has resulted in an additional argument for
``index_queryset``; I updated the search index for Oscar's product
to fix the issue.from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
<commit_before>from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
<commit_msg>Fix issue with latest changes in haystack
A merge into haystack has resulted in an additional argument for
``index_queryset``; I updated the search index for Oscar's product
to fix the issue.<commit_after>from haystack import indexes
from django.db.models import get_model
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
"""
Base class for products solr index definition. Overide by creating your
own copy of oscar.search_indexes.py
"""
text = indexes.EdgeNgramField(document=True, use_template=True,
template_name='oscar/search/indexes/product/item_text.txt')
title = indexes.EdgeNgramField(model_attr='title', null=True)
upc = indexes.CharField(model_attr="upc", null=True)
date_created = indexes.DateTimeField(model_attr='date_created')
date_updated = indexes.DateTimeField(model_attr='date_updated')
def get_model(self):
return get_model('catalogue', 'Product')
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
Orders by the most recently updated so that new objects are indexed first
"""
return self.get_model().objects.order_by('-date_updated')
def get_updated_field(self):
"""
Used to specify the field used to determine if an object has been updated
Can be used to filter the query set when updating the index
"""
return 'date_updated'
|
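The message in this record documents a Haystack API change: SearchIndex.index_queryset is now called with the search-connection alias as a `using` keyword, so overrides that omit the parameter fail with a TypeError. A minimal sketch of the compatible override pattern, assuming a single-backend setup that never needs to branch on the alias:

from haystack import indexes

class CompatProductIndex(indexes.SearchIndex):
    """Sketch only: shows the updated index_queryset contract."""

    def index_queryset(self, using=None):
        # `using` names the Haystack connection; single-backend setups
        # can ignore the value, but the signature must accept it.
        return self.get_model().objects.order_by('-date_updated')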
957c74c5083eaab466fc72e21afc929267191676
|
openedx/features/job_board/views.py
|
openedx/features/job_board/views.py
|
from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def get_context_data(self, **kwargs):
context = super(JobListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
Remove get_context_data override and use the paginator total count instead
|
Remove get_context_data override and use the paginator total count instead
|
Python
|
agpl-3.0
|
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
|
from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def get_context_data(self, **kwargs):
context = super(JobListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
Remove get_context_data override and use the paginator total count instead
|
from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
<commit_before>from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def get_context_data(self, **kwargs):
context = super(JobListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
<commit_msg>Remove get_context_data override and use the paginator total count instead<commit_after>
|
from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def get_context_data(self, **kwargs):
context = super(JobListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
Remove get_context_data override and use the paginator total count insteadfrom django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
<commit_before>from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def get_context_data(self, **kwargs):
context = super(JobListView, self).get_context_data(**kwargs)
context['total_job_count'] = Job.objects.all().count()
return context
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
<commit_msg>Remove get_context_data override and use the paginator total count instead<commit_after>from django.views.generic.list import ListView
from edxmako.shortcuts import render_to_response
from .models import Job
class JobListView(ListView):
model = Job
context_object_name = 'job_list'
paginate_by = 10
template_name = 'features/job_board/job_list.html'
ordering = ['-created']
template_engine = 'mako'
def show_job_detail(request):
return render_to_response('features/job_board/job_detail.html', {})
|
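The change in this record leans on a fact worth spelling out: Django's Paginator already knows the size of the full queryset, so a ListView needs no second count query in get_context_data. A runnable sketch using a plain list as a stand-in for the queryset:

from django.core.paginator import Paginator

jobs = ['job-%d' % i for i in range(25)]  # stand-in for Job.objects.all()
paginator = Paginator(jobs, 10)           # same page size as the view
page = paginator.page(1)
# One source of truth for the total, available on every page object:
assert page.paginator.count == 25

In the template the same value is reachable as page_obj.paginator.count.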
7eac938f0a4726beb1eb01d32486dfeb0e57ff3a
|
h2o-hadoop/tests/python/pyunit_s3_import_export.py
|
h2o-hadoop/tests/python/pyunit_s3_import_export.py
|
#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
|
#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
unique_suffix = str(uuid.uuid4())
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
timestamp + "." + unique_suffix + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
|
Fix flaky Hadoop smoke tests - make sure the exported files are unique
|
Fix flaky Hadoop smoke tests - make sure the exported files are unique
|
Python
|
apache-2.0
|
h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,michalkurka/h2o-3
|
#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
Fix flaky Hadoop smoke tests - make sure the exported files are unique
|
#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
unique_suffix = str(uuid.uuid4())
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
timestamp + "." + unique_suffix + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
|
<commit_before>#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
<commit_msg>Fix flaky Hadoop smoke tests - make sure the exported files are unique<commit_after>
|
#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
unique_suffix = str(uuid.uuid4())
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
timestamp + "." + unique_suffix + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
|
#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
Fix flaky Hadoop smoke tests - make sure the exported files are unique#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
unique_suffix = str(uuid.uuid4())
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
timestamp + "." + unique_suffix + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
|
<commit_before>#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + timestamp + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
<commit_msg>Fix flaky Hadoop smoke tests - make sure the exported files are unique<commit_after>#! /usr/env/python
import sys, os
sys.path.insert(1, os.path.join("..","..",".."))
from tests import pyunit_utils
from datetime import datetime
import h2o
import uuid
from pandas.util.testing import assert_frame_equal
def s3_import_export():
local_frame = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
for scheme in ["s3n", "s3a"]:
timestamp = datetime.today().utcnow().strftime("%Y%m%d-%H%M%S")
unique_suffix = str(uuid.uuid4())
s3_path = scheme + "://test.0xdata.com/h2o-hadoop-tests/test-export/" + scheme + "/exported." + \
timestamp + "." + unique_suffix + ".csv.zip"
h2o.export_file(local_frame, s3_path)
s3_frame = h2o.import_file(s3_path)
assert_frame_equal(local_frame.as_data_frame(), s3_frame.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(s3_import_export)
else:
s3_import_export()
|
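The flakiness fixed in this record comes from second-granularity timestamps: two parallel runs exporting within the same second raced on one S3 key. Appending a uuid4 makes each key unique regardless of timing; a standalone sketch of the naming scheme (the path prefix is illustrative):

import uuid
from datetime import datetime

stamp = datetime.utcnow().strftime("%Y%m%d-%H%M%S")
# uuid4 is random, so collisions are practically impossible even for
# concurrent jobs that share the same timestamp.
key = "test-export/exported.%s.%s.csv.zip" % (stamp, uuid.uuid4())
print(key)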
00feeb0d9515d5f47a9c22244edd817c629a96e6
|
conftest.py
|
conftest.py
|
import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
)
|
import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
MIDDLEWARE_CLASSES = [],
)
|
Set MIDDLEWARE_CLASSES so Django will stop complaining
|
Set MIDDLEWARE_CLASSES so Django will stop complaining
|
Python
|
bsd-3-clause
|
altaurog/django-caspy,altaurog/django-caspy,altaurog/django-caspy
|
import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
)
Set MIDDLEWARE_CLASSES so Django will stop complaining
|
import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
MIDDLEWARE_CLASSES = [],
)
|
<commit_before>import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
)
<commit_msg>Set MIDDLEWARE_CLASSES so Django will stop complaining<commit_after>
|
import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
MIDDLEWARE_CLASSES = [],
)
|
import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
)
Set MIDDLEWARE_CLASSES so Django will stop complainingimport os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
MIDDLEWARE_CLASSES = [],
)
|
<commit_before>import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
)
<commit_msg>Set MIDDLEWARE_CLASSES so Django will stop complaining<commit_after>import os
import sys
from django.conf import settings
def pytest_configure():
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
settings.configure(
INSTALLED_APPS = (
'caspy',
'rest_framework',
'testapp',
),
ROOT_URLCONF = 'caspy.urls',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
USE_TZ = True,
MIDDLEWARE_CLASSES = [],
)
|
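The complaint being silenced here is, most likely, the Django 1.x system check that warns when MIDDLEWARE_CLASSES is left at its implicit default under settings.configure(); declaring it empty keeps standalone pytest runs quiet. A minimal sketch of the pattern (on Django 1.10+ the equivalent setting is spelled MIDDLEWARE instead):

from django.conf import settings

if not settings.configured:
    settings.configure(
        DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3',
                               'NAME': ':memory:'}},
        MIDDLEWARE_CLASSES=[],  # silence the missing-setting complaint
    )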
9d919434ceb24fcd4e2827ce6570dde9296d7ae2
|
out2in.py
|
out2in.py
|
#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.txt')
for name in out_files[:-1]:
os.remove(name)
|
#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.csv')
for name in out_files[:-1]:
os.remove(name)
|
Fix mistyped input file name
|
Fix mistyped input file name
|
Python
|
mit
|
dzhang314/pcreo
|
#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.txt')
for name in out_files[:-1]:
os.remove(name)
Fix mistyped input file name
|
#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.csv')
for name in out_files[:-1]:
os.remove(name)
|
<commit_before>#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.txt')
for name in out_files[:-1]:
os.remove(name)
<commit_msg>Fix mistyped input file name<commit_after>
|
#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.csv')
for name in out_files[:-1]:
os.remove(name)
|
#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.txt')
for name in out_files[:-1]:
os.remove(name)
Fix mistyped input file name#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.csv')
for name in out_files[:-1]:
os.remove(name)
|
<commit_before>#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.txt')
for name in out_files[:-1]:
os.remove(name)
<commit_msg>Fix mistyped input file name<commit_after>#!/usr/bin/env python3
import glob
import os
out_files = glob.glob('pcreo_??????????.csv')
out_files.sort()
if len(out_files) == 0:
print("WARNING: No PCreo output files found.")
else:
os.rename(out_files[-1], 'pcreo_input.csv')
for name in out_files[:-1]:
os.remove(name)
|
d869b5b31eddcbc1303affb1555c2117a365b64a
|
models/recordsfields_artemplate.py
|
models/recordsfields_artemplate.py
|
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
RecodrdsFieldARTemplate.initialze(schema)
|
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
analysis_schema = (fields.Many2one(string='Priority',
comodel_name='olims.ar_priority'),
fields.Many2one(string='Partition',
comodel_name='olims.ar_partition'),
fields.Char(string="Error"),
fields.Char(string="Min"),
fields.Char(string="Max"),
fields.Many2one(string='analysis_request_id', comodel_name ='olims.analysis_request'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
class ARAnalysis(models.Model, BaseOLiMSModel):
_inherit = 'olims.records_field_artemplates'
_name = 'olims.ar_analysis'
RecodrdsFieldARTemplate.initialze(schema)
ARAnalysis.initialze(analysis_schema)
|
Define a model for 'Analyses' in Analysis Request, inheriting from records_field_artemplate.
|
Define a model for 'Analyses' in Analysis Request, inheriting from records_field_artemplate.
|
Python
|
agpl-3.0
|
sciCloud/OLiMS,sciCloud/OLiMS,yasir1brahim/OLiMS,sciCloud/OLiMS
|
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
RecodrdsFieldARTemplate.initialze(schema)Define a model for 'Analyses' in Analysis Request, inheriting from records_field_artemplate.
|
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
analysis_schema = (fields.Many2one(string='Priority',
comodel_name='olims.ar_priority'),
fields.Many2one(string='Partition',
comodel_name='olims.ar_partition'),
fields.Char(string="Error"),
fields.Char(string="Min"),
fields.Char(string="Max"),
fields.Many2one(string='analysis_request_id', comodel_name ='olims.analysis_request'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
class ARAnalysis(models.Model, BaseOLiMSModel):
_inherit = 'olims.records_field_artemplates'
_name = 'olims.ar_analysis'
RecodrdsFieldARTemplate.initialze(schema)
ARAnalysis.initialze(analysis_schema)
|
<commit_before>from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
RecodrdsFieldARTemplate.initialze(schema)<commit_msg>Define a model for 'Analyses' in Analysis Request, inheriting from records_field_artemplate.<commit_after>
|
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
analysis_schema = (fields.Many2one(string='Priority',
comodel_name='olims.ar_priority'),
fields.Many2one(string='Partition',
comodel_name='olims.ar_partition'),
fields.Char(string="Error"),
fields.Char(string="Min"),
fields.Char(string="Max"),
fields.Many2one(string='analysis_request_id', comodel_name ='olims.analysis_request'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
class ARAnalysis(models.Model, BaseOLiMSModel):
_inherit = 'olims.records_field_artemplates'
_name = 'olims.ar_analysis'
RecodrdsFieldARTemplate.initialze(schema)
ARAnalysis.initialze(analysis_schema)
|
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
RecodrdsFieldARTemplate.initialze(schema)
Define a model for 'Analyses' in Analysis Request, inheriting from records_field_artemplate.
from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
analysis_schema = (fields.Many2one(string='Priority',
comodel_name='olims.ar_priority'),
fields.Many2one(string='Partition',
comodel_name='olims.ar_partition'),
fields.Char(string="Error"),
fields.Char(string="Min"),
fields.Char(string="Max"),
fields.Many2one(string='analysis_request_id', comodel_name ='olims.analysis_request'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
class ARAnalysis(models.Model, BaseOLiMSModel):
_inherit = 'olims.records_field_artemplates'
_name = 'olims.ar_analysis'
RecodrdsFieldARTemplate.initialze(schema)
ARAnalysis.initialze(analysis_schema)
|
<commit_before>from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
RecodrdsFieldARTemplate.initialze(schema)<commit_msg>Define a model for 'Analyses' in Analysis Request, inheriting from records_field_artemplate.<commit_after>from openerp import fields, models, api
from base_olims_model import BaseOLiMSModel
schema = (fields.Many2one(string='Services',
comodel_name='olims.analysis_service',
domain="[('category', '=', Category)]",
relation='recordfield_service'),
fields.Boolean(string='Hidden',readonly=False),
fields.Float(string='Price', default=0.00,compute='_ComputeServicePriceField'),
fields.Many2one(string='Partition',
comodel_name='olims.partition_ar_template'),
fields.Many2one(string='Category',
comodel_name='olims.analysis_category'),
)
analysis_schema = (fields.Many2one(string='Priority',
comodel_name='olims.ar_priority'),
fields.Many2one(string='Partition',
comodel_name='olims.ar_partition'),
fields.Char(string="Error"),
fields.Char(string="Min"),
fields.Char(string="Max"),
fields.Many2one(string='analysis_request_id', comodel_name ='olims.analysis_request'),
)
class RecodrdsFieldARTemplate(models.Model, BaseOLiMSModel):
_name='olims.records_field_artemplates'
@api.onchange('Services')
def _ComputeServicePriceField(self):
# set auto-changing field
for item in self:
if item.Services:
item.Price = item.Services.Price
@api.onchange('Services')
def _OnChangeGetServiceHiddenField(self):
# set auto-changing field
if self.Services:
self.Hidden = self.Services.Hidden
class ARAnalysis(models.Model, BaseOLiMSModel):
_inherit = 'olims.records_field_artemplates'
_name = 'olims.ar_analysis'
RecodrdsFieldARTemplate.initialze(schema)
ARAnalysis.initialze(analysis_schema)
|
a7534be2fdc147321f180aee38c8d5879bd3f4ad
|
stestr/tests/base.py
|
stestr/tests/base.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import testtools
class TestCase(testtools.TestCase):
true = ('True', 'true', '1', 'yes')
def setUp(self):
super(TestCase, self).setUp()
if os.environ.get('OS_STDOUT_CAPTURE') in self.true:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in self.true:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
os.environ.get('OS_LOG_CAPTURE') != '0'):
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import testtools
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
|
Make unit test attachment fixtures always enabled
|
Make unit test attachment fixtures always enabled
The code used in the base test class was copied from other projects and
made the stdout, stderr, and logging attachments enabled optionally by
env variables. However there is no reason to do that since if there is
any output it is best to capture it instead of dropping it. This commit
removes the conditionals and always attaches these.
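For reference, a minimal sketch of the capture pattern this commit makes unconditional, assuming only the fixtures/testtools APIs already used in the diff:
import fixtures
import testtools
class CaptureExample(testtools.TestCase):
    def test_stdout_is_attached(self):
        stream_fx = self.useFixture(fixtures.StringStream('stdout'))
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', stream_fx.stream))
        print('hello')  # written to the fixture stream, not the real console
        # StringStream exposes the captured text as a detail named 'stdout'
        self.assertIn('hello', stream_fx.getDetails()['stdout'].as_text())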
|
Python
|
apache-2.0
|
mtreinish/stestr,mtreinish/stestr,masayukig/stestr,masayukig/stestr
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import testtools
class TestCase(testtools.TestCase):
true = ('True', 'true', '1', 'yes')
def setUp(self):
super(TestCase, self).setUp()
if os.environ.get('OS_STDOUT_CAPTURE') in self.true:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in self.true:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
os.environ.get('OS_LOG_CAPTURE') != '0'):
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
Make unit test attachment fixtures always enabled
The code used in the base test class was copied from other projects and
made the stdout, stderr, and logging attachments enabled optionally by
env variables. However there is no reason to do that since if there is
any output it is best to capture it instead of dropping it. This commit
removes the conditionals and always attaches these.
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import testtools
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import testtools
class TestCase(testtools.TestCase):
true = ('True', 'true', '1', 'yes')
def setUp(self):
super(TestCase, self).setUp()
if os.environ.get('OS_STDOUT_CAPTURE') in self.true:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in self.true:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
os.environ.get('OS_LOG_CAPTURE') != '0'):
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
<commit_msg>Make unit test attachment fixtures always enabled
The code used in the base test class was copied from other projects and
made the stdout, stderr, and logging attachments enabled optionally by
env variables. However there is no reason to do that since if there is
any output it is best to capture it instead of dropping it. This commit
removes the conditionals and always attaches these.<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import testtools
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import testtools
class TestCase(testtools.TestCase):
true = ('True', 'true', '1', 'yes')
def setUp(self):
super(TestCase, self).setUp()
if os.environ.get('OS_STDOUT_CAPTURE') in self.true:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in self.true:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
os.environ.get('OS_LOG_CAPTURE') != '0'):
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
Make unit test attachment fixtures always enabled
The code used in the base test class was copied from other projects and
made the stdout, stderr, and logging attachments enabled optionally by
env variables. However there is no reason to do that since if there is
any output it is best to capture it instead of dropping it. This commit
removes the conditionals and always attaches these.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import testtools
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import testtools
class TestCase(testtools.TestCase):
true = ('True', 'true', '1', 'yes')
def setUp(self):
super(TestCase, self).setUp()
if os.environ.get('OS_STDOUT_CAPTURE') in self.true:
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if os.environ.get('OS_STDERR_CAPTURE') in self.true:
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
os.environ.get('OS_LOG_CAPTURE') != '0'):
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
<commit_msg>Make unit test attachment fixtures always enabled
The code used in the base test class was copied from other projects and
made the stdout, stderr, and logging attachments enabled optionally by
env variables. However there is no reason to do that since if there is
any output it is best to capture it instead of dropping it. This commit
removes the conditionals and always attaches these.<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import testtools
class TestCase(testtools.TestCase):
def setUp(self):
super(TestCase, self).setUp()
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
level=None))
|
1091d541db93f533f1e078118825257bc3789371
|
data/streaming_test.py
|
data/streaming_test.py
|
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = glob.glob(srcPath+"*")
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
|
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = sorted(glob.glob(srcPath+"*"),key=os.path.getmtime)
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
|
Sort files by modification date when testing streaming to preserve order
|
Sort files by modification date when testing streaming to preserve order
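glob.glob() returns names in arbitrary filesystem order, so without a sort key the frames could be replayed out of sequence. A small sketch of the sorting step in isolation (the directory path is hypothetical):
import glob
import os
# oldest file first, mirroring the order in which the frames were written
files = sorted(glob.glob('/tmp/frames/*'), key=os.path.getmtime)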
|
Python
|
apache-2.0
|
kcompher/thunder,zhwa/thunder,kcompher/thunder,pearsonlab/thunder,kunallillaney/thunder,poolio/thunder,j-friedrich/thunder,poolio/thunder,j-friedrich/thunder,oliverhuangchao/thunder,thunder-project/thunder,jwittenbach/thunder,broxtronix/thunder,pearsonlab/thunder,oliverhuangchao/thunder,kunallillaney/thunder,broxtronix/thunder,mikarubi/thunder,zhwa/thunder,mikarubi/thunder
|
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = glob.glob(srcPath+"*")
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
Sort files by modification date when testing streaming to preserve order
|
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = sorted(glob.glob(srcPath+"*"),key=os.path.getmtime)
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
|
<commit_before>#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = glob.glob(srcPath+"*")
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
<commit_msg>Sort files by modification date when testing streaming to preserve order<commit_after>
|
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = sorted(glob.glob(srcPath+"*"),key=os.path.getmtime)
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
|
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = glob.glob(srcPath+"*")
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
Sort files by modification date when testing streaming to preserve order
#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = sorted(glob.glob(srcPath+"*"),key=os.path.getmtime)
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
|
<commit_before>#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = glob.glob(srcPath+"*")
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
<commit_msg>Sort files by modification date when testing streaming to preserve order<commit_after>#!/usr/bin/env python
#
# test a streaming app by dumping files from one directory
# into another, at a specified rate
#
# <streaming_test> srcPath targetPath waitTime
#
# example:
# data/streaming_test.py /groups/ahrens/ahrenslab/Misha/forJeremy_SparkStreamingSample/ /nobackup/freeman/buffer/ 1
#
import sys, os, time, glob;
srcPath = str(sys.argv[1])
targetPath = str(sys.argv[2])
waitTime = float(sys.argv[3])
files = sorted(glob.glob(srcPath+"*"),key=os.path.getmtime)
count = 1
for f in files:
cmd = "scp " + f + " " + targetPath
os.system(cmd)
print('writing file ' +str(count))
count = count + 1
time.sleep(waitTime)
|
4976931febdbddec362411b62c7574d4a26368d5
|
launch_instance.py
|
launch_instance.py
|
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
|
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None, initial_check=True):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
if initial_check:
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
|
Make the initialization wait optional
|
Make the initialization wait optional
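A hedged usage sketch of the new flag; the key-pair name is hypothetical. Passing initial_check=False returns as soon as the instance leaves 'pending', without waiting for the EC2 status checks:
from launch_instance import launch
# skip the status-check wait; only the 'pending' -> 'running' poll happens
inst = launch('my-keypair', instance_type='t2.micro', initial_check=False)
print(inst.id, inst.state)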
|
Python
|
mit
|
Astroua/aws_controller,Astroua/aws_controller
|
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
Make the initialization wait optional
|
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None, initial_check=True):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
if initial_check:
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
|
<commit_before># License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
<commit_msg>Make the initialization wait optional<commit_after>
|
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None, initial_check=True):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
if initial_check:
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
|
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
Make the initialization wait optional
# License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None, initial_check=True):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
if initial_check:
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
|
<commit_before># License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
<commit_msg>Make the initialization wait optional<commit_after># License under the MIT License - see LICENSE
import boto.ec2
import os
import time
def launch(key_name, region='us-west-2', image_id='ami-5189a661',
instance_type='t2.micro', security_groups='launch-wizard-1',
user_data=None, initial_check=True):
'''
'''
if not isinstance(security_groups, list):
security_groups = [security_groups]
ec2 = boto.ec2.connect_to_region(region)
reserve = ec2.run_instances(image_id, key_name=key_name,
instance_type=instance_type,
security_groups=security_groups,
user_data=user_data)
inst = reserve.instances[0]
while inst.state == u'pending':
time.sleep(10)
inst.update()
# Wait for the status checks first
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
if initial_check:
check_stat = "Status:initializing"
while str(status.system_status) == check_stat and str(status.instance_status) == check_stat:
time.sleep(10)
status = ec2.get_all_instance_status(instance_ids=[inst.id])[0]
return inst
# ec2.get_instance_attribute('i-336b69f6', 'instanceType')
|
ccc2a583ca2365609e0da0d4bdc5e00d49cd172b
|
bob/templatetags/bob.py
|
bob/templatetags/bob.py
|
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
fragment = context['page'].fragment[name]
kwargs['fragment'] = fragment
template_names = fragment.template_names
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
|
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
kwargs['fragment'] = fragment = context['page'].fragment[name]
template_names = fragment.template_names[:]
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
|
Copy the template name list from the fragment so we don't mutate it
|
Copy the template name list from the fragment so we don't mutate it
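The underlying pitfall is plain list aliasing: without the [:] copy, the insert() in render() would permanently prepend the override template to the fragment's shared template_names list. A standalone illustration with hypothetical names:
defaults = ['fragment.html', 'base.html']
names = defaults            # alias: mutating names would also mutate defaults
names = defaults[:]         # shallow copy: safe to mutate independently
names.insert(0, 'override.html')
assert defaults == ['fragment.html', 'base.html']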
|
Python
|
mit
|
funkybob/bobcms,funkybob/bobcms
|
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
fragment = context['page'].fragment[name]
kwargs['fragment'] = fragment
template_names = fragment.template_names
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
Copy the template name list from the fragment so we don't mutate it
|
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
kwargs['fragment'] = fragment = context['page'].fragment[name]
template_names = fragment.template_names[:]
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
|
<commit_before>
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
fragment = context['page'].fragment[name]
kwargs['fragment'] = fragment
template_names = fragment.template_names
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
<commit_msg>Copy the template name list from the fragment so we don't mutate it<commit_after>
|
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
kwargs['fragment'] = fragment = context['page'].fragment[name]
template_names = fragment.template_names[:]
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
|
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
fragment = context['page'].fragment[name]
kwargs['fragment'] = fragment
template_names = fragment.template_names
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
Copy the template name list from the fragment so we don't mutate it
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
kwargs['fragment'] = fragment = context['page'].fragment[name]
template_names = fragment.template_names[:]
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
|
<commit_before>
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
fragment = context['page'].fragment[name]
kwargs['fragment'] = fragment
template_names = fragment.template_names
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
<commit_msg>Copy the template name list from the fragment so we don't mutate it<commit_after>
from django import template
from django.template.loaders import select_template
register = template.Library()
class PlaceholderNode(template.Node):
def __init__(self, name, **kwargs):
self.name = name
self.kwargs = kwargs
def render(self, context):
name = self.name.resolve(context)
kwargs = {
name: value.resolve(context)
for name, value in self.kwargs.items()
}
kwargs['fragment'] = fragment = context['page'].fragment[name]
template_names = fragment.template_names[:]
if 'template' in kwargs:
template_names.insert(0, kwargs.pop('template'))
tmpl = select_template(template_names)
with context.push(**kwargs):
return tmpl.render(context)
@register.tag
def placeholder(parser, token):
bits = token.contents.split()
try:
name = bits.pop(0)
except IndexError:
raise template.TemplateSyntaxError(
'Placeholder requires one positional argument'
)
try:
name = template.Variable(name).resolve({})
except template.VariableDoesNotExist:
raise template.TemplateSyntaxError(
'Placeholder name must be a literal.'
)
kwargs = template.token_kwargs(bits, parser)
if bits:
raise template.TemplateSyntaxError(
'Placeholder accepts only one positional argument.'
)
return PlaceholderNode(name, **kwargs)
|
cc7604bc06616b3450db4365141738c92e8285b8
|
memegen/settings.py
|
memegen/settings.py
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
|
Disable SQLAlchemy change tracking signals
|
Disable SQLAlchemy change tracking signals
This is enabled by default, which currently emits a warning.
The default will be False in the future.
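A quick check of the new default through the module's own get_config helper; every environment inherits the setting from the base Config class:
from memegen.settings import get_config
config = get_config('dev')
assert config.SQLALCHEMY_TRACK_MODIFICATIONS is False  # inherited from Config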
|
Python
|
mit
|
DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
Disable SQLAlchemy change tracking signals
This is enabled by default, which currently emits a warning.
The default will be False in the future.
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
|
<commit_before>import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
<commit_msg>Disable SQLAlchemy change tracking signals
This is enabled by default, which currently emits a warning.
The default will be False in the future.<commit_after>
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
|
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
Disable SQLAlchemy change tracking signals
This is enabled by default, which currently emits a warning.
The default will be False in the future.
import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
|
<commit_before>import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
<commit_msg>Disable SQLAlchemy change tracking signals
This is enabled by default, which currently emits a warning.
The default will be False in the future.<commit_after>import os
class Config:
"""Base configuration."""
ENV = None
PATH = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.dirname(PATH)
DEBUG = False
THREADED = False
GOOGLE_ANALYTICS_TID = os.getenv('GOOGLE_ANALYTICS_TID')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
class TestConfig(Config):
"""Test configuration."""
ENV = 'test'
DEBUG = True
TESTING = True
SQLALCHEMY_DATABASE_URI = "postgresql://localhost/memegen_test"
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
def get_config(name):
assert name, "no configuration specified"
for config in Config.__subclasses__(): # pylint: disable=no-member
if config.ENV == name:
return config
assert False, "no matching configuration"
|
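Worth noting why the new flag helps: Flask-SQLAlchemy's modification tracking installs per-session bookkeeping to emit models_committed signals, which costs memory and is what triggers the warning. A minimal sketch of an app factory consuming this config module — the create_app wiring is an assumption for illustration, not part of the commit:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

def create_app(env='dev'):
    # get_config() is the subclass lookup defined in this config module
    app = Flask(__name__)
    app.config.from_object(get_config(env))
    # With SQLALCHEMY_TRACK_MODIFICATIONS = False, Flask-SQLAlchemy skips
    # the signalling session extension entirely, so the warning goes away.
    db.init_app(app)
    return app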
e3a95d00444fb981d7aaf6d3beffca8796a8891f
|
mycroft/frontends/tts/mimic_tts.py
|
mycroft/frontends/tts/mimic_tts.py
|
from subprocess import call
from mycroft.frontends.tts.tts_plugin import TtsPlugin
class MimicTts(TtsPlugin):
def read(self, text):
call(['mimic', '-t', text, '-voice', self.config['voice']])
|
from subprocess import call
from os.path import isdir
from mycroft.frontends.tts.tts_plugin import TtsPlugin
from mycroft.util.git_repo import GitRepo
class MimicTts(TtsPlugin):
def __init__(self, rt):
super().__init__(rt)
if not isdir(self.rt.paths.mimic_exe):
self.download_mimic()
def download_mimic(self):
repo = GitRepo(self.rt.paths.mimic, self.config['url'], 'master')
repo.try_pull()
repo.run_inside('./dependencies.sh --prefix="/usr/local"')
repo.run_inside('./autogen.sh')
repo.run_inside('./configure.sh --prefix="/usr/local"')
repo.run_inside('make -j2')
def read(self, text):
call([self.rt.paths.mimic_exe, '-t', text, '-voice', self.config['voice']])
|
Add download and compile step to mimic
|
Add download and compile step to mimic
|
Python
|
apache-2.0
|
MatthewScholefield/mycroft-simple,MatthewScholefield/mycroft-simple
|
from subprocess import call
from mycroft.frontends.tts.tts_plugin import TtsPlugin
class MimicTts(TtsPlugin):
def read(self, text):
call(['mimic', '-t', text, '-voice', self.config['voice']])
Add download and compile step to mimic
|
from subprocess import call
from os.path import isdir
from mycroft.frontends.tts.tts_plugin import TtsPlugin
from mycroft.util.git_repo import GitRepo
class MimicTts(TtsPlugin):
def __init__(self, rt):
super().__init__(rt)
if not isdir(self.rt.paths.mimic_exe):
self.download_mimic()
def download_mimic(self):
repo = GitRepo(self.rt.paths.mimic, self.config['url'], 'master')
repo.try_pull()
repo.run_inside('./dependencies.sh --prefix="/usr/local"')
repo.run_inside('./autogen.sh')
repo.run_inside('./configure.sh --prefix="/usr/local"')
repo.run_inside('make -j2')
def read(self, text):
call([self.rt.paths.mimic_exe, '-t', text, '-voice', self.config['voice']])
|
<commit_before>from subprocess import call
from mycroft.frontends.tts.tts_plugin import TtsPlugin
class MimicTts(TtsPlugin):
def read(self, text):
call(['mimic', '-t', text, '-voice', self.config['voice']])
<commit_msg>Add download and compile step to mimic<commit_after>
|
from subprocess import call
from os.path import isdir
from mycroft.frontends.tts.tts_plugin import TtsPlugin
from mycroft.util.git_repo import GitRepo
class MimicTts(TtsPlugin):
def __init__(self, rt):
super().__init__(rt)
if not isdir(self.rt.paths.mimic_exe):
self.download_mimic()
def download_mimic(self):
repo = GitRepo(self.rt.paths.mimic, self.config['url'], 'master')
repo.try_pull()
repo.run_inside('./dependencies.sh --prefix="/usr/local"')
repo.run_inside('./autogen.sh')
repo.run_inside('./configure.sh --prefix="/usr/local"')
repo.run_inside('make -j2')
def read(self, text):
call([self.rt.paths.mimic_exe, '-t', text, '-voice', self.config['voice']])
|
from subprocess import call
from mycroft.frontends.tts.tts_plugin import TtsPlugin
class MimicTts(TtsPlugin):
def read(self, text):
call(['mimic', '-t', text, '-voice', self.config['voice']])
Add download and compile step to mimicfrom subprocess import call
from os.path import isdir
from mycroft.frontends.tts.tts_plugin import TtsPlugin
from mycroft.util.git_repo import GitRepo
class MimicTts(TtsPlugin):
def __init__(self, rt):
super().__init__(rt)
if not isdir(self.rt.paths.mimic_exe):
self.download_mimic()
def download_mimic(self):
repo = GitRepo(self.rt.paths.mimic, self.config['url'], 'master')
repo.try_pull()
repo.run_inside('./dependencies.sh --prefix="/usr/local"')
repo.run_inside('./autogen.sh')
repo.run_inside('./configure.sh --prefix="/usr/local"')
repo.run_inside('make -j2')
def read(self, text):
call([self.rt.paths.mimic_exe, '-t', text, '-voice', self.config['voice']])
|
<commit_before>from subprocess import call
from mycroft.frontends.tts.tts_plugin import TtsPlugin
class MimicTts(TtsPlugin):
def read(self, text):
call(['mimic', '-t', text, '-voice', self.config['voice']])
<commit_msg>Add download and compile step to mimic<commit_after>from subprocess import call
from os.path import isdir
from mycroft.frontends.tts.tts_plugin import TtsPlugin
from mycroft.util.git_repo import GitRepo
class MimicTts(TtsPlugin):
def __init__(self, rt):
super().__init__(rt)
if not isdir(self.rt.paths.mimic_exe):
self.download_mimic()
def download_mimic(self):
repo = GitRepo(self.rt.paths.mimic, self.config['url'], 'master')
repo.try_pull()
repo.run_inside('./dependencies.sh --prefix="/usr/local"')
repo.run_inside('./autogen.sh')
repo.run_inside('./configure.sh --prefix="/usr/local"')
repo.run_inside('make -j2')
def read(self, text):
call([self.rt.paths.mimic_exe, '-t', text, '-voice', self.config['voice']])
|
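The commit leans on a GitRepo helper whose API is only visible from its call sites. A hedged sketch of what try_pull/run_inside plausibly do, built on subprocess — the real mycroft.util.git_repo implementation may differ:

import subprocess
from os.path import isdir

class GitRepoSketch:
    def __init__(self, path, url, branch):
        self.path, self.url, self.branch = path, url, branch

    def try_pull(self):
        # clone on first use, pull thereafter
        if isdir(self.path):
            subprocess.call(['git', '-C', self.path, 'pull'])
        else:
            subprocess.call(['git', 'clone', '-b', self.branch, self.url, self.path])

    def run_inside(self, command):
        # shell=True so composite build commands like './autogen.sh' run as written
        subprocess.check_call(command, cwd=self.path, shell=True)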
345794f454642d3a313b8da4c87a874ed9521c09
|
preprocessing/collect_unigrams.py
|
preprocessing/collect_unigrams.py
|
# -*- coding: utf-8 -*-
"""
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ARTICLES_FILEPATH = "/media/aj/grab/nlp/corpus/processed/wikipedia-ner/annotated-fulltext.txt"
WRITE_UNIGRAMS_FILEPATH = os.path.join(CURRENT_DIR, "unigrams.txt")
WRITE_UNIGRAMS_PERSON_FILEPATH = os.path.join(CURRENT_DIR, "unigrams_per.txt")
def main():
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(WRITE_UNIGRAMS_FILEPATH)
ug_all = None
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(WRITE_UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yesterday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
from config import *
def main():
"""Main function."""
# collect all unigrams (all labels, including "O")
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(UNIGRAMS_FILEPATH)
ug_all = None
# collect only unigrams of label PER
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
|
Add documentation, refactor to use config
|
Add documentation, refactor to use config
|
Python
|
mit
|
aleju/ner-crf
|
# -*- coding: utf-8 -*-
"""
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ARTICLES_FILEPATH = "/media/aj/grab/nlp/corpus/processed/wikipedia-ner/annotated-fulltext.txt"
WRITE_UNIGRAMS_FILEPATH = os.path.join(CURRENT_DIR, "unigrams.txt")
WRITE_UNIGRAMS_PERSON_FILEPATH = os.path.join(CURRENT_DIR, "unigrams_per.txt")
def main():
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(WRITE_UNIGRAMS_FILEPATH)
ug_all = None
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(WRITE_UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
Add documentation, refactor to use config
|
# -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yesterday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
from config import *
def main():
"""Main function."""
# collect all unigrams (all labels, including "O")
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(UNIGRAMS_FILEPATH)
ug_all = None
# collect only unigrams of label PER
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
|
<commit_before># -*- coding: utf-8 -*-
"""
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ARTICLES_FILEPATH = "/media/aj/grab/nlp/corpus/processed/wikipedia-ner/annotated-fulltext.txt"
WRITE_UNIGRAMS_FILEPATH = os.path.join(CURRENT_DIR, "unigrams.txt")
WRITE_UNIGRAMS_PERSON_FILEPATH = os.path.join(CURRENT_DIR, "unigrams_per.txt")
def main():
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(WRITE_UNIGRAMS_FILEPATH)
ug_all = None
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(WRITE_UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
<commit_msg>Add documentation, refactor to use config<commit_after>
|
# -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yesterday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
from config import *
def main():
"""Main function."""
# collect all unigrams (all labels, including "O")
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(UNIGRAMS_FILEPATH)
ug_all = None
# collect only unigrams of label PER
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ARTICLES_FILEPATH = "/media/aj/grab/nlp/corpus/processed/wikipedia-ner/annotated-fulltext.txt"
WRITE_UNIGRAMS_FILEPATH = os.path.join(CURRENT_DIR, "unigrams.txt")
WRITE_UNIGRAMS_PERSON_FILEPATH = os.path.join(CURRENT_DIR, "unigrams_per.txt")
def main():
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(WRITE_UNIGRAMS_FILEPATH)
ug_all = None
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(WRITE_UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
Add documentation, refactor to use config# -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yesterday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
from config import *
def main():
"""Main function."""
# collect all unigrams (all labels, including "O")
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(UNIGRAMS_FILEPATH)
ug_all = None
# collect only unigrams of label PER
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
|
<commit_before># -*- coding: utf-8 -*-
"""
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ARTICLES_FILEPATH = "/media/aj/grab/nlp/corpus/processed/wikipedia-ner/annotated-fulltext.txt"
WRITE_UNIGRAMS_FILEPATH = os.path.join(CURRENT_DIR, "unigrams.txt")
WRITE_UNIGRAMS_PERSON_FILEPATH = os.path.join(CURRENT_DIR, "unigrams_per.txt")
def main():
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(WRITE_UNIGRAMS_FILEPATH)
ug_all = None
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(WRITE_UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
<commit_msg>Add documentation, refactor to use config<commit_after># -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yesterday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
from config import *
def main():
"""Main function."""
# collect all unigrams (all labels, including "O")
print("Collecting unigrams...")
ug_all = Unigrams()
ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
ug_all.write_to_file(UNIGRAMS_FILEPATH)
ug_all = None
# collect only unigrams of label PER
print("Collecting person names (label=PER)...")
ug_names = Unigrams()
ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
ug_names.write_to_file(UNIGRAMS_PERSON_FILEPATH)
print("Finished.")
if __name__ == "__main__":
main()
|
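The refactor replaces module-level path constants with a star import, so the script now assumes a sibling config module. A minimal config.py that would satisfy it — the paths are placeholders, not values from the repository:

import os

CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ARTICLES_FILEPATH = "/path/to/annotated-fulltext.txt"  # hypothetical corpus path
UNIGRAMS_FILEPATH = os.path.join(CURRENT_DIR, "unigrams.txt")
UNIGRAMS_PERSON_FILEPATH = os.path.join(CURRENT_DIR, "unigrams_per.txt")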
400828c8606e614dcc11b1e7c7d1fb7336ab5082
|
corehq/apps/es/sms.py
|
corehq/apps/es/sms.py
|
from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
|
from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def user_facet(self, size=None):
return self.terms_facet('couch_recipient', 'user', size=size)
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
|
Add user facet for SMS
|
Add user facet for SMS
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
Add user facet for SMS
|
from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def user_facet(self, size=None):
return self.terms_facet('couch_recipient', 'user', size=size)
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
|
<commit_before>from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
<commit_msg>Add user facet for SMS<commit_after>
|
from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def user_facet(self, size=None):
return self.terms_facet('couch_recipient', 'user', size=size)
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
|
from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
Add user facet for SMSfrom .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def user_facet(self, size=None):
return self.terms_facet('couch_recipient', 'user', size=size)
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
|
<commit_before>from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
<commit_msg>Add user facet for SMS<commit_after>from .es_query import HQESQuery
from . import filters
class SMSES(HQESQuery):
index = 'sms'
@property
def builtin_filters(self):
return [
incoming_messages,
outgoing_messages,
to_commcare_user,
to_commcare_case,
to_web_user,
to_couch_user,
to_commcare_user_or_case,
received,
] + super(SMSES, self).builtin_filters
def user_facet(self, size=None):
return self.terms_facet('couch_recipient', 'user', size=size)
def incoming_messages():
return filters.term("direction", "i")
def outgoing_messages():
return filters.term("direction", "o")
def to_commcare_user():
return filters.term("couch_recipient_doc_type", "commcareuser")
def to_commcare_case():
return filters.term("couch_recipient_doc_type", "commcarecase")
def to_web_user():
return filters.term("couch_recipient_doc_type", "webuser")
def to_couch_user():
return filters.term("couch_recipient_doc_type", "couchuser")
def to_commcare_user_or_case():
return filters.OR(to_commcare_user(), to_commcare_case())
def received(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date', gt, gte, lt, lte)
|
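A hedged usage sketch for the new facet: HQESQuery exposes builtin filters as chainable methods, so counting incoming messages per recipient might look like the following (the .run() step follows the usual builder style and is an assumption, not shown in this diff):

query = (SMSES()
         .incoming_messages()
         .user_facet(size=10))   # terms facet named 'user' over couch_recipient
# result = query.run()  # top recipient buckets would appear under the 'user' facet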
f5a7d49c18fe51c0600bf54b8cbf46a7cb8a0bca
|
mixmind/database.py
|
mixmind/database.py
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='manager', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
|
Use "owner" in place of "manager" for bar roles
|
Use "owner" in place of "manager" for bar roles
|
Python
|
apache-2.0
|
twschum/mix-mind,twschum/mix-mind,twschum/mix-mind,twschum/mix-mind
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='manager', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
Use "owner" in place of "manager" for bar roles
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
|
<commit_before>from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='manager', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
<commit_msg>Use "owner" in place of "manager" for bar roles<commit_after>
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
|
from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='manager', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
Use "owner" in place of "manager" for bar rolesfrom flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
|
<commit_before>from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='manager', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
<commit_msg>Use "owner" in place of "manager" for bar roles<commit_after>from flask_sqlalchemy import SQLAlchemy
from flask_alembic import Alembic
db = SQLAlchemy()
alembic = Alembic()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import models
import barstock
from authorization import user_datastore
db.create_all()
user_datastore.find_or_create_role(name='admin', description='An admin user may modify the parameters of the app backend')
user_datastore.find_or_create_role(name='bartender', description='This user is a bartender at at least one bar')
user_datastore.find_or_create_role(name='owner', description='This user can do limited management at one bar')
user_datastore.find_or_create_role(name='customer', description='Customer may register to make it easier to order drinks')
db.session.commit()
# now handle alembic revisions
alembic.revision('Convert columns to support unicode')
alembic.upgrade()
|
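Since the roles are created for Flask-Security's user_datastore, the renamed 'owner' role would typically gate views via a decorator. A minimal sketch — the view itself is hypothetical, not part of this commit:

from flask_security import roles_accepted

@roles_accepted('admin', 'owner')
def bar_settings(bar_id):
    # reachable only for users holding either role
    ...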
2a5cc95cc08f90659a6d18db3a9e60c02f7ec4b7
|
lib/servo_process.py
|
lib/servo_process.py
|
from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
|
from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
|
Move signal ignore to run
|
Move signal ignore to run
|
Python
|
mit
|
mlensment/rebot,mlensment/rebot
|
from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
Move signal ignore to run
|
from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
|
<commit_before>from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
<commit_msg>Move signal ignore to run<commit_after>
|
from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
|
from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
Move signal ignore to runfrom multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
|
<commit_before>from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
<commit_msg>Move signal ignore to run<commit_after>from multiprocessing import Process, Value
import os
import config
import servo
import signal
class ServoProcess(Process):
def __init__(self):
print '----> Checking servo driver...'
if not os.path.exists('/dev/servoblaster'):
raise Exception('Servo driver was not found. Is servoblaster loaded?')
else:
print '----> Servo driver loaded'
Process.__init__(self)
self.spoon = servo.Servo(config.SPOON_SERVO_ID)
self.leg = servo.Servo(config.LEG_SERVO_ID)
self.initialized = Value('b', False)
def run(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
print '----> Initiating servo calibration sequence...'
i = config.SERVO_MAX_WIDTH - config.SERVO_MIN_WIDTH
while(i > 0):
self.spoon.decrease_pwm(10)
self.leg.decrease_pwm(10)
i -= 10
print '----> Servo calibration complete'
self.initialized.value = True
while(1):
self.spoon.update()
self.spoon.alter_pwm()
self.leg.update()
self.leg.alter_pwm()
|
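The reason the move matters: __init__ executes in the parent process, so installing SIG_IGN there mutes Ctrl-C for the whole program, while run() executes only in the spawned child. A minimal sketch of the after-state, with illustrative names:

import os
import signal
from multiprocessing import Process

class Worker(Process):
    def run(self):
        # runs in the child process only; the parent keeps its default handler
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        print('child %d now ignores SIGINT' % os.getpid())

if __name__ == '__main__':
    w = Worker()
    w.start()   # Ctrl-C still interrupts the parent, but not the worker
    w.join()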
fbc0a83bc9a72c57ba56cecd9fa06e7e86ea7589
|
nbgrader/auth/base.py
|
nbgrader/auth/base.py
|
"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
super(BaseAuth, self).__init__(**kwargs)
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
|
"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
super(BaseAuth, self).__init__(**kwargs)
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
|
Set instance variables in auth before init
|
Set instance variables in auth before init
|
Python
|
bsd-3-clause
|
ellisonbg/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader
|
"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
super(BaseAuth, self).__init__(**kwargs)
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
Set instance variables in auth before init
|
"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
super(BaseAuth, self).__init__(**kwargs)
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
|
<commit_before>"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
super(BaseAuth, self).__init__(**kwargs)
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
<commit_msg>Set instance variables in auth before init<commit_after>
|
"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
super(BaseAuth, self).__init__(**kwargs)
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
|
"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
super(BaseAuth, self).__init__(**kwargs)
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
Set instance variables in auth before init"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
super(BaseAuth, self).__init__(**kwargs)
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
|
<commit_before>"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
super(BaseAuth, self).__init__(**kwargs)
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
<commit_msg>Set instance variables in auth before init<commit_after>"""Base formgrade authenticator."""
from traitlets.config.configurable import LoggingConfigurable
class BaseAuth(LoggingConfigurable):
"""Base formgrade authenticator."""
def __init__(self, ip, port, base_directory, **kwargs):
self._ip = ip
self._port = port
self._base_url = ''
self._base_directory = base_directory
super(BaseAuth, self).__init__(**kwargs)
@property
def base_url(self):
return self._base_url
@property
def login_url(self):
return ''
def get_user(self, handler):
return 'nbgrader'
def authenticate(self, user):
"""Authenticate a user."""
return user
def notebook_server_exists(self):
"""Checks for a notebook server."""
return False
def get_notebook_server_cookie(self):
"""Gets a cookie that is needed to access the notebook server."""
return None
def get_notebook_url(self, relative_path):
"""Gets the notebook's url."""
raise NotImplementedError
def transform_handler(self, handler):
return handler
def stop(self):
"""Stops the notebook server."""
pass
|
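A note on the ordering change this record captures: frameworks whose base __init__ runs configuration hooks can call back into subclass code before the subclass's own __init__ body has finished, so any state those hooks rely on must be assigned first. A minimal, dependency-free sketch of that failure mode (Base, Child and on_init are hypothetical names for illustration, not nbgrader or traitlets API):
class Base(object):
    def __init__(self, **kwargs):
        self.on_init()  # base construction invokes an overridable hook
    def on_init(self):
        pass
class Child(Base):
    def __init__(self, port, **kwargs):
        self._port = port                       # set state first...
        super(Child, self).__init__(**kwargs)   # ...so the hook can read it
    def on_init(self):
        print('listening on %d' % self._port)   # safe: _port already set
Child(port=8000)  # prints: listening on 8000
Assigning self._port after the super() call would instead raise AttributeError inside on_init.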
4c813a82d0035c9f49e0b07f54150676c5dd8faf
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
|
#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
result = unittest2.TextTestRunner(verbosity=2).run(suite)
if len(result.errors) > 0 or len(result.failures) > 0:
sys.exit(1)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
|
Make sure the tests exit with status 1 when there are errors or failures
|
Make sure the tests exit with status 1 when there are errors or failures
|
Python
|
apache-2.0
|
google/gae-secure-scaffold-python,google/gae-secure-scaffold-python3,google/gae-secure-scaffold-python,google/gae-secure-scaffold-python
|
#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
Make sure the tests exit with status 1 when there are errors or failures
|
#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
result = unittest2.TextTestRunner(verbosity=2).run(suite)
if len(result.errors) > 0 or len(result.failures) > 0:
sys.exit(1)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
|
<commit_before>#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
<commit_msg>Make sure the tests exit with status 1 when there are errors or failures<commit_after>
|
#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
result = unittest2.TextTestRunner(verbosity=2).run(suite)
if len(result.errors) > 0 or len(result.failures) > 0:
sys.exit(1)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
|
#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
Make sure the tests exit with status 1 when there are errors or failures#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
result = unittest2.TextTestRunner(verbosity=2).run(suite)
if len(result.errors) > 0 or len(result.failures) > 0:
sys.exit(1)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
|
<commit_before>#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
unittest2.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
<commit_msg>Make sure the tests exit with status 1 when there are errors or failures<commit_after>#!/usr/bin/python
import optparse
import sys
import unittest2
USAGE = """%prog SDK_PATH TEST_PATH <THIRD_PARTY>
Run unit tests for App Engine apps.
SDK_PATH Path to the SDK installation
TEST_PATH Path to package containing test modules
THIRD_PARTY Optional path to third party python modules to include."""
def main(sdk_path, test_path, third_party_path=None):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
if third_party_path:
sys.path.insert(0, third_party_path)
suite = unittest2.loader.TestLoader().discover(test_path,
pattern='*_test.py')
result = unittest2.TextTestRunner(verbosity=2).run(suite)
if len(result.errors) > 0 or len(result.failures) > 0:
sys.exit(1)
if __name__ == '__main__':
sys.dont_write_bytecode = True
parser = optparse.OptionParser(USAGE)
options, args = parser.parse_args()
if len(args) < 2:
print 'Error: At least 2 arguments required.'
parser.print_help()
sys.exit(1)
SDK_PATH = args[0]
TEST_PATH = args[1]
THIRD_PARTY_PATH = args[2] if len(args) > 2 else None
main(SDK_PATH, TEST_PATH, THIRD_PARTY_PATH)
|
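The fix in this record inspects result.errors and result.failures by hand; unittest2's TestResult also exposes wasSuccessful(), which is an equivalent and arguably clearer check. A sketch, assuming the same suite variable as in the script above:
result = unittest2.TextTestRunner(verbosity=2).run(suite)
if not result.wasSuccessful():  # False when any error or failure occurred
    sys.exit(1)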
66df7e17fa3eb2d8da2e56e39236b019f98a2a08
|
geokey_wegovnow/__init__.py
|
geokey_wegovnow/__init__.py
|
"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
Increment version number ahead of release
|
Increment version number ahead of release
|
Python
|
mit
|
ExCiteS/geokey-wegovnow,ExCiteS/geokey-wegovnow
|
"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
Increment version number ahead of release
|
"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
<commit_before>"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
<commit_msg>Increment version number ahead of release<commit_after>
|
"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
Increment version number ahead of release"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
<commit_before>"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
<commit_msg>Increment version number ahead of release<commit_after>"""Main initialization for the WeGovNow extension."""
VERSION = (3, 2, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_wegovnow',
'WeGovNow',
display_admin=False,
superuser=False,
version=__version__
)
except BaseException:
print 'Please install GeoKey first'
|
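The version string in this record is derived from the tuple, so a release only ever touches VERSION; a quick illustration of the pattern:
VERSION = (3, 2, 1)
print('.'.join(map(str, VERSION)))  # -> 3.2.1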
43a8043054dc5f942a04efe7273b3b1743db89ca
|
test.py
|
test.py
|
import time
import urllib
import RPi.GPIO as GPIO
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
GPIO.setmode(GPIO.BCM)
# Disable "Ports already in use" warning
GPIO.setwarnings(False)
# Set the pin to be an input
GPIO.setup(LPR_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and GPIO.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and GPIO.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
GPIO.cleanup()
|
import time
import urllib
import RPi.GPIO as gpio
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
gpio.setmode(gpio.BCM)
# Disable "Ports already in use" warning
gpio.setwarnings(False)
# Set the pin to be an input
gpio.setup(LPR_PIN, gpio.IN)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and gpio.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and gpio.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
gpio.cleanup()
|
Revert GPIO setup to be optimized for GPIO3 (with internal pull up resistor)
|
Revert GPIO setup to be optimized for GPIO3 (with internal pull up resistor)
|
Python
|
mit
|
adampiskorski/lpr_poc
|
import time
import urllib
import RPi.GPIO as GPIO
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
GPIO.setmode(GPIO.BCM)
# Disable "Ports already in use" warning
GPIO.setwarnings(False)
# Set the pin to be an input
GPIO.setup(LPR_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and GPIO.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and GPIO.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
GPIO.cleanup()Revert GPIO setup to be optimized for GPIO3 (with internal pull up resistor)
|
import time
import urllib
import RPi.GPIO as gpio
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
gpio.setmode(gpio.BCM)
# Disable "Ports already in use" warning
gpio.setwarnings(False)
# Set the pin to be an input
gpio.setup(LPR_PIN, gpio.IN)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and gpio.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and gpio.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
gpio.cleanup()
|
<commit_before>import time
import urllib
import RPi.GPIO as GPIO
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
GPIO.setmode(GPIO.BCM)
# Disable "Ports already in use" warning
GPIO.setwarnings(False)
# Set the pin to be an input
GPIO.setup(LPR_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and GPIO.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and GPIO.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
GPIO.cleanup()<commit_msg>Revert GPIO setup to be optimized for GPIO3 (with internal pull up resistor)<commit_after>
|
import time
import urllib
import RPi.GPIO as gpio
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
gpio.setmode(gpio.BCM)
# Disable "Ports already in use" warning
gpio.setwarnings(False)
# Set the pin to be an input
gpio.setup(LPR_PIN, gpio.IN)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and gpio.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and gpio.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
gpio.cleanup()
|
import time
import urllib
import RPi.GPIO as GPIO
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
GPIO.setmode(GPIO.BCM)
# Disable "Ports already in use" warning
GPIO.setwarnings(False)
# Set the pin to be an input
GPIO.setup(LPR_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and GPIO.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and GPIO.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
GPIO.cleanup()Revert GPIO setup to be optimized for GPIO3 (with internal pull up resistor)import time
import urllib
import RPi.GPIO as gpio
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
gpio.setmode(gpio.BCM)
# Disable "Ports already in use" warning
gpio.setwarnings(False)
# Set the pin to be an input
gpio.setup(LPR_PIN, gpio.IN)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and gpio.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and gpio.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
gpio.cleanup()
|
<commit_before>import time
import urllib
import RPi.GPIO as GPIO
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
GPIO.setmode(GPIO.BCM)
# Disable "Ports already in use" warning
GPIO.setwarnings(False)
# Set the pin to be an input
GPIO.setup(LPR_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and GPIO.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and GPIO.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
GPIO.cleanup()<commit_msg>Revert GPIO setup to be optimized for GPIO3 (with internal pull up resistor)<commit_after>import time
import urllib
import RPi.GPIO as gpio
# GPIO input pin to use
LPR_PIN = 3
# URL to get image from
SOURCE = 'http://192.168.0.13:8080/photoaf.jpg'
# Path to save image locally
FILE = 'img.jpg'
# Use GPIO pin numbers
gpio.setmode(gpio.BCM)
# Disable "Ports already in use" warning
gpio.setwarnings(False)
# Set the pin to be an input
gpio.setup(LPR_PIN, gpio.IN)
# Try statement to cleanup GPIO pins
try:
# Only save the image once per gate opening
captured = False
# Main loop
while True:
# Capture the image if not captured yet and switch is closed (open gate)
if not captured and gpio.input(LPR_PIN) is True:
urllib.urlretrieve(SOURCE, FILE)
print "Gate has been opened!"
captured = True
# If there was a capture and the switch is now open (closed gate) then
# ready the loop to capture again.
if captured and gpio.input(LPR_PIN) is False:
print "The gate has now closed!"
captured = False
time.sleep(1)
except KeyboardInterrupt:
gpio.cleanup()
|
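On the hardware detail behind this revert: BCM pins 2 and 3 on the Raspberry Pi double as the I2C bus and carry fixed on-board pull-up resistors, so requesting the internal pull-up in software is redundant there. A sketch of the distinction, assuming RPi.GPIO on a Pi (pin 17 is an arbitrary example pin, not from this record):
import RPi.GPIO as gpio
gpio.setmode(gpio.BCM)
gpio.setup(3, gpio.IN)                              # GPIO3: board pull-up already present
gpio.setup(17, gpio.IN, pull_up_down=gpio.PUD_UP)   # other pins: request it explicitly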
8e5ad2138d0685e4322156b3f545be46a3f0c99f
|
util.py
|
util.py
|
#!/usr/bin/env python
import glob
import os.path
import random
def pick_random(directory, k=None):
"""Pick randomly some files from a directory."""
all_files = glob.glob(os.path.join(directory, '*'))
random.shuffle(all_files)
return all_files if k is None else all_files[:k]
|
#!/usr/bin/env python
import glob
import os.path
import random
import re
def pick(directory, k=None, randomized=True):
"""Pick some thread files from a thread directory."""
all_files = glob.glob(os.path.join(directory, '*'))
if randomized:
random.shuffle(all_files)
else:
pattern = '([0-9]+)\.txt'
all_files.sort(key=lambda f: int(re.search(pattern, f).group(1)))
return all_files if k is None else all_files[:k]
|
Modify to pick either randomly or sequentially
|
Modify to pick either randomly or sequentially
|
Python
|
mit
|
kemskems/otdet
|
#!/usr/bin/env python
import glob
import os.path
import random
def pick_random(directory, k=None):
"""Pick randomly some files from a directory."""
all_files = glob.glob(os.path.join(directory, '*'))
random.shuffle(all_files)
return all_files if k is None else all_files[:k]
Modify to pick either randomly or sequentially
|
#!/usr/bin/env python
import glob
import os.path
import random
import re
def pick(directory, k=None, randomized=True):
"""Pick some thread files from a thread directory."""
all_files = glob.glob(os.path.join(directory, '*'))
if randomized:
random.shuffle(all_files)
else:
pattern = '([0-9]+)\.txt'
all_files.sort(key=lambda f: int(re.search(pattern, f).group(1)))
return all_files if k is None else all_files[:k]
|
<commit_before>#!/usr/bin/env python
import glob
import os.path
import random
def pick_random(directory, k=None):
"""Pick randomly some files from a directory."""
all_files = glob.glob(os.path.join(directory, '*'))
random.shuffle(all_files)
return all_files if k is None else all_files[:k]
<commit_msg>Modify to pick either randomly or sequentially<commit_after>
|
#!/usr/bin/env python
import glob
import os.path
import random
import re
def pick(directory, k=None, randomized=True):
"""Pick some thread files from a thread directory."""
all_files = glob.glob(os.path.join(directory, '*'))
if randomized:
random.shuffle(all_files)
else:
pattern = '([0-9]+)\.txt'
all_files.sort(key=lambda f: int(re.search(pattern, f).group(1)))
return all_files if k is None else all_files[:k]
|
#!/usr/bin/env python
import glob
import os.path
import random
def pick_random(directory, k=None):
"""Pick randomly some files from a directory."""
all_files = glob.glob(os.path.join(directory, '*'))
random.shuffle(all_files)
return all_files if k is None else all_files[:k]
Modify to pick either randomly or sequentially#!/usr/bin/env python
import glob
import os.path
import random
import re
def pick(directory, k=None, randomized=True):
"""Pick some thread files from a thread directory."""
all_files = glob.glob(os.path.join(directory, '*'))
if randomized:
random.shuffle(all_files)
else:
pattern = '([0-9]+)\.txt'
all_files.sort(key=lambda f: int(re.search(pattern, f).group(1)))
return all_files if k is None else all_files[:k]
|
<commit_before>#!/usr/bin/env python
import glob
import os.path
import random
def pick_random(directory, k=None):
"""Pick randomly some files from a directory."""
all_files = glob.glob(os.path.join(directory, '*'))
random.shuffle(all_files)
return all_files if k is None else all_files[:k]
<commit_msg>Modify to pick either randomly or sequentially<commit_after>#!/usr/bin/env python
import glob
import os.path
import random
import re
def pick(directory, k=None, randomized=True):
"""Pick some thread files from a thread directory."""
all_files = glob.glob(os.path.join(directory, '*'))
if randomized:
random.shuffle(all_files)
else:
pattern = '([0-9]+)\.txt'
all_files.sort(key=lambda f: int(re.search(pattern, f).group(1)))
return all_files if k is None else all_files[:k]
|
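A usage sketch for the pick() above, assuming it is importable and that 'threads/' is a hypothetical directory holding 1.txt, 2.txt and 10.txt. The regex key sorts the thread files numerically, where a plain lexicographic sort would misplace 10.txt before 2.txt:
files = pick('threads/', k=2, randomized=False)
# -> ['threads/1.txt', 'threads/2.txt']  (numeric order, not '1', '10', '2')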
1502fe9bfe428207706c2b8b684b36af884b74db
|
modules/development/superuser.py
|
modules/development/superuser.py
|
from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command(usage='<path1> <path2>')
@confirm.instance_owner()
async def coreswap(self, ctx):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
|
from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command()
@confirm.instance_owner()
async def coreswap(self, ctx, *, path1, path2):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
|
Add kwargs to this to make Jenkins shut up about it
|
Add kwargs to this to make Jenkins shut up about it
|
Python
|
mit
|
HexadecimalPython/Xeili,awau/Amethyst
|
from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command(usage='<path1> <path2>')
@confirm.instance_owner()
async def coreswap(self, ctx):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
Add kwargs to this to make Jenkins shut up about it
|
from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command()
@confirm.instance_owner()
async def coreswap(self, ctx, *, path1, path2):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
|
<commit_before>from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command(usage='<path1> <path2>')
@confirm.instance_owner()
async def coreswap(self, ctx):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
<commit_msg>Add kwargs to this to make Jenkins shut up about it<commit_after>
|
from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command()
@confirm.instance_owner()
async def coreswap(self, ctx, *, path1, path2):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
|
from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command(usage='<path1> <path2>')
@confirm.instance_owner()
async def coreswap(self, ctx):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
Add kwargs to this to make Jenkins shut up about itfrom utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command()
@confirm.instance_owner()
async def coreswap(self, ctx, *, path1, path2):
"""Command to swap your core module.
        Please note that this cog is in the development folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
|
<commit_before>from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command(usage='<path1> <path2>')
@confirm.instance_owner()
async def coreswap(self, ctx):
"""Command to swap your core module.
Please note that this cog is in the devleopment folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
<commit_msg>Add kwargs to this to make Jenkins shut up about it<commit_after>from utils.command_system import command
from utils import confirm
import discord
class SuperUser:
def __init__(self, amethyst):
self.amethyst = amethyst
@command()
@confirm.instance_owner()
async def coreswap(self, ctx, *, path1, path2):
"""Command to swap your core module.
Please note that this cog is in the devleopment folder,
meaning that it should NOT be used in your bot until completion.
It is far from complete and may contain a lot of bugs, Any bug report
regarding any modules from the Development folder will be dismissed."""
if not ctx.args:
return await ctx.send('No arguments given.')
try:
self.amethyst.holder.unload_module(path1)
self.amethyst.holder.load_module(path2)
await ctx.send('Core swap complete.')
except:
            await ctx.send('Core swap failed!')
def setup(amethyst):
return SuperUser(amethyst)
|
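The bare * in the new signature makes path1 and path2 keyword-only in Python 3, which presumably matches how this command framework fills them in. The core semantics, in plain Python:
def swap(ctx, *, path1, path2):
    return path1, path2
swap('ctx', path1='old.mod', path2='new.mod')  # OK
swap('ctx', 'old.mod', 'new.mod')              # TypeError: positional values not accepted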
0da7e65a5def18ae6f2562b7e30d710b4a71de1a
|
repl.py
|
repl.py
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
#print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
#print(tree)
return tree.eval(context).data
except Exception as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
Reduce print statement console clustering
|
Reduce print statement console clustering
|
Python
|
mit
|
lnsp/tea,lnsp/tea
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
Reduce print statement console clustering
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
#print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
#print(tree)
return tree.eval(context).data
except Exception as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
<commit_msg>Reduce print statement console clustering<commit_after>
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
#print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
#print(tree)
return tree.eval(context).data
except Exception as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
Reduce print statement console clustering#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
#print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
#print(tree)
return tree.eval(context).data
except Exception as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
<commit_msg>Reduce print statement console clustering<commit_after>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
#print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
#print(tree)
return tree.eval(context).data
except Exception as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
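The pattern this record introduces — catching everything around a single REPL evaluation so a bad line is echoed back instead of crashing the loop — in isolation (safe_eval is a hypothetical name, not from the tea runtime):
def safe_eval(line, evaluate):
    try:
        return evaluate(line)
    except Exception as e:  # deliberately broad: keep the REPL alive
        return str(e)
print(safe_eval('1/0', eval))  # prints the ZeroDivisionError text; the loop survives
One trade-off worth noting: returning str(e) makes an error indistinguishable from a string result, which is acceptable at an interactive prompt but not for programmatic callers.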
90db6286cdfb493de5dc944783aa65b4fbce38b8
|
ghettoq/backends/pyredis.py
|
ghettoq/backends/pyredis.py
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
DEFAULT_PORT = 6379
class RedisBackend(BaseBackend):
def establish_connection(self):
self.port = self.port or DEFAULT_PORT
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.lpush(queue, message)
def get(self, queue):
return self.client.rpop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
Use rpop/rpush instead of deprecated pop/push (conforming to redis 1.x)
|
Use rpop/rpush instead of deprecated pop/push (conforming to redis 1.x)
|
Python
|
bsd-3-clause
|
ask/ghettoq
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
Use rpop/rpush instead of deprecated pop/push (conforming to redis 1.x)
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
DEFAULT_PORT = 6379
class RedisBackend(BaseBackend):
def establish_connection(self):
self.port = self.port or DEFAULT_PORT
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.lpush(queue, message)
def get(self, queue):
return self.client.rpop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
<commit_before>from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
<commit_msg>Use rpop/rpush instead of deprecated pop/push (conforming to redis 1.x)<commit_after>
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
DEFAULT_PORT = 6379
class RedisBackend(BaseBackend):
def establish_connection(self):
self.port = self.port or DEFAULT_PORT
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.lpush(queue, message)
def get(self, queue):
return self.client.rpop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
Use rpop/rpush instead of deprecated pop/push (conforming to redis 1.x)from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
DEFAULT_PORT = 6379
class RedisBackend(BaseBackend):
def establish_connection(self):
self.port = self.port or DEFAULT_PORT
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.lpush(queue, message)
def get(self, queue):
return self.client.rpop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
<commit_before>from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
class RedisBackend(BaseBackend):
def establish_connection(self):
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.push(queue, message, head=False)
def get(self, queue):
return self.client.pop(queue)
def purge(self, queue):
return self.client.delete(queue)
<commit_msg>Use rpop/rpush instead of deprecated pop/push (conforming to redis 1.x)<commit_after>from redis import Redis as Redis
from ghettoq.backends.base import BaseBackend
DEFAULT_PORT = 6379
class RedisBackend(BaseBackend):
def establish_connection(self):
self.port = self.port or DEFAULT_PORT
return Redis(host=self.host, port=self.port, db=self.database,
password=self.password)
def put(self, queue, message):
self.client.lpush(queue, message)
def get(self, queue):
return self.client.rpop(queue)
def purge(self, queue):
return self.client.delete(queue)
|
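The lpush/rpop pair the fix adopts pushes on the left and pops on the right, so together they behave as a FIFO queue. A sketch, assuming a local Redis server on the default port:
from redis import Redis
r = Redis()
r.lpush('q', 'a')
r.lpush('q', 'b')
print(r.rpop('q'))  # 'a' — the oldest element comes out first
(Recent redis-py clients return bytes here, so the printed value may appear as b'a'.)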
02668b8dfda3c00f4ae74846d7c14c5dde64e17c
|
asciitree/__init__.py
|
asciitree/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
Remove now obsolete ascii_tree function.
|
Remove now obsolete ascii_tree function.
|
Python
|
mit
|
mbr/asciitree
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
Remove now obsolete ascii_tree function.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
<commit_msg>Remove now obsolete ascii_tree function.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
Remove now obsolete ascii_tree function.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def ascii_tree(node,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return _draw_tree(get_root(node), '', get_node_children, get_node_text)
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
<commit_msg>Remove now obsolete ascii_tree function.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from legacy import draw_tree, _draw_tree
def left_aligned(tree,
get_node_children=lambda t: t[1].items(),
get_node_text=lambda t: t[0],
get_root=lambda d: d.items()[0]):
return '\n'.join(_left_aligned(get_root(tree),
get_node_children,
get_node_text))
def _left_aligned(node, get_node_children, get_node_text):
lines = []
children = get_node_children(node)
lines.append(get_node_text(node))
for n, child in enumerate(children):
child_tree = _left_aligned(child, get_node_children, get_node_text)
if n == len(children) - 1:
# last child does not get the line drawn
lines.append(' +--' + child_tree.pop(0))
prefix = ' '
else:
lines.append(' +--' + child_tree.pop(0))
prefix = ' |'
child_tree = [prefix + l for l in child_tree]
lines.extend(child_tree)
return lines
|
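For reference, a small usage sketch of the left_aligned helper kept by this record. The nested dict is invented, and note that the default get_root accessor indexes dict.items()[0], so the snippet assumes Python 2 dictionaries, matching the era of this code.
from asciitree import left_aligned

# A single root key whose value is a nested mapping of children.
tree = {'root': {'child-a': {}, 'child-b': {'grandchild': {}}}}

# Renders one node per line with ' +--' connectors under the root.
print(left_aligned(tree))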
77579ff7d7a63539d350c40d49eedeb21e61bd61
|
acute/schema.py
|
acute/schema.py
|
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
Make Diagnosis come before PMH
|
Make Diagnosis come before PMH
closes #5
|
Python
|
agpl-3.0
|
openhealthcare/acute,openhealthcare/acute,openhealthcare/acute
|
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
Make Diagnosis come before PMH
closes #5
|
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
<commit_before>"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
<commit_msg>Make Diagnosis come before PMH
closes #5<commit_after>
|
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
Make Diagnosis come before PMH
closes #5
"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
<commit_before>"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
<commit_msg>Make Diagnosis come before PMH
closes #5<commit_after>"""
Define acute schemas.
"""
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
f7788bf9cb2d8d762689a24b63aaeaec3f076d72
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(application)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
|
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(
application,
verify_certs=True
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
|
Validate TLS certificate for connections to Elasticsearch
|
Validate TLS certificate for connections to Elasticsearch
By default Python elasticsearch client 1.x sets verify_certs to
False, which means it doesn't verify TLS certificates when connecting
to Elasticsearch over HTTPS and urllib3 prints an InsecureRequestWarning
on each request.
Setting `verify_certs=True` explicitly removes the warning and prevents
connections if the certificate doesn't validate.
|
Python
|
mit
|
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
|
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(application)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
Validate TLS certificate for connections to Elasticsearch
By default Python elasticsearch client 1.x sets verify_certs to
False, which means it doesn't verify TLS certificates when connecting
to Elasticsearch over HTTPS and urllib3 prints an InsecureRequestWarning
on each request.
Setting `verify_certs=True` explicitly removes the warning and prevents
connections if the certificate doesn't validate.
|
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(
application,
verify_certs=True
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
|
<commit_before>from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(application)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
<commit_msg>Validate TLS certificate for connections to Elasticsearch
By default Python elasticsearch client 1.x sets verify_certs to
False, which means it doesn't verify TLS certificates when connecting
to Elasticsearch over HTTPS and urllib3 prints an InsecureRequestWarning
on each request.
Setting `verify_certs=True` explicitly removes the warning and prevents
connections if the certificate doesn't validate.<commit_after>
|
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(
application,
verify_certs=True
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
|
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(application)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
Validate TLS certificate for connections to Elasticsearch
By default Python elasticsearch client 1.x sets verify_certs to
False, which means it doesn't verify TLS certificates when connecting
to Elasticsearch over HTTPS and urllib3 prints an InsecureRequestWarning
on each request.
Setting `verify_certs=True` explicitly removes the warning and prevents
connections if the certificate doesn't validate.
from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(
application,
verify_certs=True
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
|
<commit_before>from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(application)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
<commit_msg>Validate TLS certificate for connections to Elasticsearch
By default Python elasticsearch client 1.x sets verify_certs to
False, which means it doesn't verify TLS certificates when connecting
to Elasticsearch over HTTPS and urllib3 prints an InsecureRequestWarning
on each request.
Setting `verify_certs=True` explicitly removes the warning and prevents
connections if the certificate doesn't validate.<commit_after>from flask import Flask
from config import config as configs
from flask.ext.bootstrap import Bootstrap
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
bootstrap = Bootstrap()
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
bootstrap=bootstrap,
feature_flags=feature_flags
)
elasticsearch_client.init_app(
application,
verify_certs=True
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
if configs[config_name].ALLOW_EXPLORER:
from .explorer import explorer as explorer_blueprint
application.register_blueprint(explorer_blueprint)
return application
|
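As a standalone illustration of the setting this record turns on, the sketch below builds an elasticsearch-py 1.x client directly with certificate verification enabled; the host URL is a placeholder.
from elasticsearch import Elasticsearch

# With verify_certs=True the client validates the server's TLS
# certificate on HTTPS connections instead of accepting any cert.
es = Elasticsearch(['https://search.example.com:9243'], verify_certs=True)
print(es.ping())  # True if the cluster answers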
f196171b15c137352a628cf9690b3bf9f8590ca7
|
author/forms.py
|
author/forms.py
|
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug']
|
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug', 'bio']
|
Exclude the proxy bio field.
|
Exclude the proxy bio field.
|
Python
|
bsd-3-clause
|
PARINetwork/pari,PARINetwork/pari,PARINetwork/pari,PARINetwork/pari
|
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug']
Exclude the proxy bio field.
|
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug', 'bio']
|
<commit_before>from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug']
<commit_msg>Exclude the proxy bio field.<commit_after>
|
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug', 'bio']
|
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug']
Exclude the proxy bio field.
from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug', 'bio']
|
<commit_before>from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug']
<commit_msg>Exclude the proxy bio field.<commit_after>from django import forms
from wagtail.wagtailimages.widgets import AdminImageChooser
from .models import Author
class AuthorAdminForm(forms.ModelForm):
class Meta:
model = Author
# TODO: Ability to add author image
exclude = ['image', 'slug', 'bio']
|
399b7b03e29364beeb7abe3926deb42b99f34047
|
prawcore/requestor.py
|
prawcore/requestor.py
|
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
ttps://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
|
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
https://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
|
Add missing 'h' in https in Requestor doc.
|
Add missing 'h' in https in Requestor doc.
|
Python
|
bsd-2-clause
|
praw-dev/prawcore
|
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
ttps://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
Add missing 'h' in https in Requestor doc.
|
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
https://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
|
<commit_before>"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
ttps://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
<commit_msg>Add missing 'h' in https in Requestor doc.<commit_after>
|
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
https://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
|
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
ttps://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
Add missing 'h' in https in Requestor doc.
"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
https://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
|
<commit_before>"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
ttps://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
<commit_msg>Add missing 'h' in https in Requestor doc.<commit_after>"""Provides the HTTP request handling interface."""
import requests
from . import const
class Requestor(object):
"""Requestor provides an interface to HTTP requests."""
def __init__(self, user_agent):
"""Create an instance of the Requestor class.
:param user_agent: The user-agent for your application. Please follow
reddit's user-agent guidlines:
https://github.com/reddit/reddit/wiki/API#rules
"""
self._http = requests.Session()
self._http.headers['User-Agent'] = '{} {}'.format(
user_agent, const.USER_AGENT)
def __getattr__(self, attribute):
"""Pass all undefined attributes to the _http attribute."""
return getattr(self._http, attribute)
|
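Beyond the one-letter fix, the record shows a common pattern: one shared requests.Session whose User-Agent header is sent with every call. A minimal sketch follows; the agent string and URL are invented for illustration.
import requests

session = requests.Session()
# Headers set on the session accompany every request made through it.
session.headers['User-Agent'] = 'example-app/1.0 (contact: dev@example.com)'

response = session.get('https://httpbin.org/user-agent')
print(response.json())  # httpbin echoes the User-Agent header back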
2ad2d488b4d7b0997355c068646a6a38b2668dae
|
meetuppizza/tests.py
|
meetuppizza/tests.py
|
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
|
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_page_contains_pizza(self):
response = self.client.get('/')
self.assertContains(response, "Pizza")
|
Add test that checks if landing page contains the word Pizza.
|
Add test that checks if landing page contains the word Pizza.
|
Python
|
mit
|
nicole-a-tesla/meetup.pizza,nicole-a-tesla/meetup.pizza
|
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
Add test that checks if landing page contains the word Pizza.
|
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_page_contains_pizza(self):
response = self.client.get('/')
self.assertContains(response, "Pizza")
|
<commit_before>from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
<commit_msg>Add test that checks if landing page contains the word Pizza.<commit_after>
|
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_page_contains_pizza(self):
response = self.client.get('/')
self.assertContains(response, "Pizza")
|
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
Add test that checks if landing page contains the word Pizza.
from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_page_contains_pizza(self):
response = self.client.get('/')
self.assertContains(response, "Pizza")
|
<commit_before>from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
<commit_msg>Add test that checks if landing page contains the word Pizza.<commit_after>from django.test import TestCase
class Test(TestCase):
def test_landing_page_is_there(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_page_contains_pizza(self):
response = self.client.get('/')
self.assertContains(response, "Pizza")
|
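For context, assertContains as used in this record performs two checks at once: it asserts the response status code is 200 and that the given text occurs in the body. A minimal sketch against a hypothetical '/about/' route:
from django.test import TestCase

class AboutPageTest(TestCase):
    def test_about_page_mentions_team(self):
        response = self.client.get('/about/')  # hypothetical URL
        # Passes only if the status code is 200 and 'Team'
        # appears somewhere in the response content.
        self.assertContains(response, 'Team')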
94cd92d7afc39e832fb1d0d570ae007a9e4dab21
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
)
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
scripts=[
'scripts/run_ilamb.sh'
],
)
|
Install run_ilamb.sh to Python distro bin directory
|
Install run_ilamb.sh to Python distro bin directory
|
Python
|
mit
|
permamodel/ILAMB,permamodel/ILAMB,permamodel/ILAMB
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
)
Install run_ilamb.sh to Python distro bin directory
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
scripts=[
'scripts/run_ilamb.sh'
],
)
|
<commit_before>#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
)
<commit_msg>Install run_ilamb.sh to Python distro bin directory<commit_after>
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
scripts=[
'scripts/run_ilamb.sh'
],
)
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
)
Install run_ilamb.sh to Python distro bin directory
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
scripts=[
'scripts/run_ilamb.sh'
],
)
|
<commit_before>#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
)
<commit_msg>Install run_ilamb.sh to Python distro bin directory<commit_after>#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pylamb import __version__
setup(name='ILAMB',
version=__version__,
author='Mark Piper',
author_email='mark.piper@colorado.edu',
description='Python BMI for ILAMB',
long_description=open('README.md').read(),
packages=find_packages(),
scripts=[
'scripts/run_ilamb.sh'
],
)
|
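The scripts= argument used in this record copies each listed file into the interpreter's bin directory at install time and marks it executable, which is how run_ilamb.sh ends up on the PATH. A minimal sketch with invented names:
from setuptools import setup, find_packages

setup(
    name='example-package',  # invented name for illustration
    version='0.1.0',
    packages=find_packages(),
    # Files listed here are installed into the environment's
    # bin/ directory and made executable.
    scripts=['scripts/run_example.sh'],
)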
4607a2f5575a42c8e12da7dbc9a1e8ae27398fef
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']})
|
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']},
scripts=['luigi-wrapper', 'scripts/submit-experiments-from-gsheet'])
|
Install luigi-wrapper and submit-experiments-from-gsheet scripts
|
Install luigi-wrapper and submit-experiments-from-gsheet scripts
|
Python
|
unlicense
|
ppavlidis/rnaseq-pipeline,ppavlidis/rnaseq-pipeline,ppavlidis/rnaseq-pipeline
|
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']})
Install luigi-wrapper and submit-experiments-from-gsheet scripts
|
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']},
scripts=['luigi-wrapper', 'scripts/submit-experiments-from-gsheet'])
|
<commit_before>from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']})
<commit_msg>Install luigi-wrapper and submit-experiments-from-gsheet scripts<commit_after>
|
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']},
scripts=['luigi-wrapper', 'scripts/submit-experiments-from-gsheet'])
|
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']})
Install luigi-wrapper and submit-experiments-from-gsheet scripts
from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']},
scripts=['luigi-wrapper', 'scripts/submit-experiments-from-gsheet'])
|
<commit_before>from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']})
<commit_msg>Install luigi-wrapper and submit-experiments-from-gsheet scripts<commit_after>from setuptools import setup, find_packages
setup(name='rnaseq_pipeline',
version='2.0.0',
description='RNA-Seq pipeline for the Pavlidis Lab',
long_description='file: README.md',
author='Guillaume Poirier-Morency',
author_email='poirigui@msl.ubc.ca',
packages=find_packages(),
install_requires=['luigi', 'bioluigi', 'PyYAML', 'requests', 'pandas'],
extras_require={
'gsheet': ['google-api-python-client', 'google-auth-httplib2', 'google-auth-oauthlib', 'pyxdg'],
'webviewer': ['Flask']},
scripts=['luigi-wrapper', 'scripts/submit-experiments-from-gsheet'])
|
f32c62c63a3339ef7351c54009eb515973747dd1
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5,<0.6',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
|
Allow higher versions of Oscar
|
Allow higher versions of Oscar
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-stores,django-oscar/django-oscar-stores,django-oscar/django-oscar-stores
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5,<0.6',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
Allow higher versions of Oscar
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5,<0.6',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
<commit_msg>Allow higher versions of Oscar<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5,<0.6',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
Allow higher versions of Oscar
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5,<0.6',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
<commit_msg>Allow higher versions of Oscar<commit_after>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version="0.5-dev",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(
os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django-oscar>=0.5',
'requests>=1.1,<1.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
])
|
ec3f3187aef144dc4402646eaef64f9f41c7df0a
|
setup.py
|
setup.py
|
import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.10'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
|
import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.11'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
|
Set cov-core dependency to 1.11
|
Set cov-core dependency to 1.11
|
Python
|
mit
|
pytest-dev/pytest-cov,moreati/pytest-cov,ionelmc/pytest-cover,wushaobo/pytest-cov,schlamar/pytest-cov,opoplawski/pytest-cov
|
import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.10'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
Set cov-core dependency to 1.11
|
import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.11'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
|
<commit_before>import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.10'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
<commit_msg>Set cov-core dependency to 1.11<commit_after>
|
import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.11'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
|
import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.10'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
Set cov-core dependency to 1.11import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.11'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
|
<commit_before>import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.10'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
<commit_msg>Set cov-core dependency to 1.11<commit_after>import setuptools
setuptools.setup(name='pytest-cov',
version='1.6',
description='py.test plugin for coverage reporting with '
'support for both centralised and distributed testing, '
'including subprocesses and multiprocessing',
long_description=open('README.rst').read().strip(),
author='Marc Schlaich',
author_email='marc.schlaich@gmail.com',
url='https://github.com/schlamar/pytest-cov',
py_modules=['pytest_cov'],
install_requires=['pytest>=2.5.2',
'cov-core>=1.11'],
entry_points={'pytest11': ['pytest_cov = pytest_cov']},
license='MIT License',
zip_safe=False,
keywords='py.test pytest cover coverage distributed parallel',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Topic :: Software Development :: Testing'])
|
5c1adfbb6cbd18253d7c846be5639c4e60087125
|
setup.py
|
setup.py
|
import sys
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
|
import sys
import re
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
description = re.sub(r'\[!\[.+\].+\]\(.+\)', '', description)
description = '\n'.join(description.splitlines()[2:])
description = re.sub('\n{2,}', '\n\n', description).strip()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
|
Clean up the description for PyPI
|
Clean up the description for PyPI
|
Python
|
mit
|
prophile/circle-asset
|
import sys
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
Clean up the description for PyPI
|
import sys
import re
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
description = re.sub(r'\[!\[.+\].+\]\(.+\)', '', description)
description = '\n'.join(description.splitlines()[2:])
description = re.sub('\n{2,}', '\n\n', description).strip()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
|
<commit_before>import sys
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
<commit_msg>Clean up the description for PyPI<commit_after>
|
import sys
import re
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
description = re.sub(r'\[!\[.+\].+\]\(.+\)', '', description)
description = '\n'.join(description.splitlines()[2:])
description = re.sub('\n{2,}', '\n\n', description).strip()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
|
import sys
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
Clean up the description for PyPIimport sys
import re
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
description = re.sub(r'\[!\[.+\].+\]\(.+\)', '', description)
description = '\n'.join(description.splitlines()[2:])
description = re.sub('\n{2,}', '\n\n', description).strip()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
|
<commit_before>import sys
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
<commit_msg>Clean up the description for PyPI<commit_after>import sys
import re
if sys.version < '3.4':
print('Sorry, this is not a compatible version of Python. Use 3.4 or later.')
exit(1)
from setuptools import setup, find_packages
with open('README.md') as f:
description = f.read()
description = re.sub(r'\[!\[.+\].+\]\(.+\)', '', description)
description = '\n'.join(description.splitlines()[2:])
description = re.sub('\n{2,}', '\n\n', description).strip()
from circle_asset.version import VERSION, SHORT_DESCRIPTION
setup(name='circle-asset',
version=VERSION,
description=SHORT_DESCRIPTION,
author='Alistair Lynn',
author_email='arplynn@gmail.com',
license="MIT",
long_description=description,
url='https://github.com/prophile/circle-asset',
zip_safe=True,
setup_requires=['nose >=1, <2'],
install_requires=[
'requests >=2.5, <3'
],
entry_points={'console_scripts': [
'circle-asset=circle_asset.cli:main'
]},
packages=find_packages(),
test_suite='nose.collector')
|
190a5eb43138850f7d8bb3cd959f02c4cd8cea25
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx',
'ghp-import'
]
}
)
|
Add missing ghp-import requirement as an extra dev dependency.
|
Add missing ghp-import requirement as an extra dev dependency.
|
Python
|
mit
|
TkTech/Jawa,TkTech/Jawa
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx'
]
}
)
Add missing ghp-import requirement as an extra dev dependency.
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx',
'ghp-import'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx'
]
}
)
<commit_msg>Add missing ghp-import requirement as an extra dev dependency.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx',
'ghp-import'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx'
]
}
)
Add missing ghp-import requirement as an extra dev dependency.#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx',
'ghp-import'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx'
]
}
)
<commit_msg>Add missing ghp-import requirement as an extra dev dependency.<commit_after>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
tests_require=[
'pytest'
],
extras_require={
'dev': [
'pytest',
'pytest-cov',
'sphinx',
'ghp-import'
]
}
)
|
aa4c539c36d56e6f7bb87d312191e3788f363b52
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy==1.15.4', # FIXME: temporary because of bug in numpy 1.16
'pyqt5>=5.10.1'
]
)
|
from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy>=1.15.0',
'pyqt5>=5.10.1'
]
)
|
Remove exact numpy version requirement
|
Remove exact numpy version requirement
|
Python
|
apache-2.0
|
maximecb/gym-minigrid
|
from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy==1.15.4', # FIXME: temporary because of bug in numpy 1.16
'pyqt5>=5.10.1'
]
)
Remove exact numpy version requirement
|
from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy>=1.15.0',
'pyqt5>=5.10.1'
]
)
|
<commit_before>from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy==1.15.4', # FIXME: temporary because of bug in numpy 1.16
'pyqt5>=5.10.1'
]
)
<commit_msg>Remove exact numpy version requirement<commit_after>
|
from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy>=1.15.0',
'pyqt5>=5.10.1'
]
)
|
from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy==1.15.4', # FIXME: temporary because of bug in numpy 1.16
'pyqt5>=5.10.1'
]
)
Remove exact numpy version requirementfrom setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy>=1.15.0',
'pyqt5>=5.10.1'
]
)
|
<commit_before>from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy==1.15.4', # FIXME: temporary because of bug in numpy 1.16
'pyqt5>=5.10.1'
]
)
<commit_msg>Remove exact numpy version requirement<commit_after>from setuptools import setup
setup(
name='gym_minigrid',
version='0.0.1',
keywords='memory, environment, agent, rl, openaigym, openai-gym, gym',
packages=['gym_minigrid', 'gym_minigrid.envs'],
install_requires=[
'gym>=0.9.6',
'numpy>=1.15.0',
'pyqt5>=5.10.1'
]
)
|
e654cea816be8c4a79da66efbc50a5698a51ba5b
|
plantcv/plantcv/print_results.py
|
plantcv/plantcv/print_results.py
|
# Print Numerical Data
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
|
# Print Numerical Data
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
Add deprecation warning and use new method
|
Add deprecation warning and use new method
|
Python
|
mit
|
stiphyMT/plantcv,danforthcenter/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,stiphyMT/plantcv,danforthcenter/plantcv
|
# Print Numerical Data
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
Add deprecation warning and use new method
|
# Print Numerical Data
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
<commit_before># Print Numerical Data
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
<commit_msg>Add deprecation warning and use new method<commit_after>
|
# Print Numerical Data
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
# Print Numerical Data
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
Add deprecation warning and use new method# Print Numerical Data
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
<commit_before># Print Numerical Data
import json
import os
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
if os.path.isfile(filename):
with open(filename, 'r') as f:
hierarchical_data = json.load(f)
hierarchical_data["observations"] = outputs.observations
else:
hierarchical_data = {"metadata": {}, "observations": outputs.observations}
with open(filename, mode='w') as f:
json.dump(hierarchical_data, f)
<commit_msg>Add deprecation warning and use new method<commit_after># Print Numerical Data
from plantcv.plantcv import outputs
def print_results(filename):
"""Print result table
Inputs:
filename = filename
:param filename: str
:return:
"""
print("""Deprecation warning: plantcv.print_results will be removed in a future version.
Please use plantcv.outputs.save_results instead.
""")
outputs.save_results(filename=filename, outformat="json")
|
90f5ae3d3089f622694169d3327d932a68d3019d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=0.3.1,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
from fuzzywuzzy import __version__
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=__version__,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
|
Use __version__ from fuzzywuzzy package
|
Use __version__ from fuzzywuzzy package
|
Python
|
mit
|
zhahaoyu/fuzzywuzzy,pombredanne/fuzzywuzzy,beni55/fuzzywuzzy,medecau/fuzzywuzzy,zhahaoyu/fuzzywuzzy,blakejennings/fuzzywuzzy,beni55/fuzzywuzzy,salilnavgire/fuzzywuzzy,jayhetee/fuzzywuzzy,shalecraig/fuzzywuzzy,jayhetee/fuzzywuzzy,aeeilllmrx/fuzzywuzzy,shalecraig/fuzzywuzzy,aeeilllmrx/fuzzywuzzy,pombredanne/fuzzywuzzy,blakejennings/fuzzywuzzy,salilnavgire/fuzzywuzzy
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=0.3.1,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
Use __version__ from fuzzywuzzy package
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
from fuzzywuzzy import __version__
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=__version__,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=0.3.1,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
<commit_msg>Use __version__ from fuzzywuzzy package<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
from fuzzywuzzy import __version__
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=__version__,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=0.3.1,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
Use __version__ from fuzzywuzzy package#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
from fuzzywuzzy import __version__
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=__version__,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=0.3.1,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
<commit_msg>Use __version__ from fuzzywuzzy package<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 SeatGeek
# This file is part of fuzzywuzzy.
from fuzzywuzzy import __version__
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='fuzzywuzzy',
version=__version__,
author='Adam Cohen',
author_email='adam@seatgeek.com',
packages=['fuzzywuzzy'],
url='https://github.com/seatgeek/fuzzywuzzy',
license=open('LICENSE.txt').read(),
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3'
],
description='Fuzzy string matching in python',
long_description=open_file('README.rst').read(),
zip_safe=True,
)
|
ee1c890df7c2c86192b68bd442e41226f70a3850
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Remove the obsolete comment, library is licensed under BSD.
|
Remove the obsolete comment, library is licensed under BSD.
|
Python
|
bsd-3-clause
|
Liuftvafas/python-face-client,Kami/python-face-client
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Remove the obsolete comment, library is licensed under BSD.
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Remove the obsolete comment, library is licensed under BSD.<commit_after>
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Remove the obsolete comment, library is licensed under BSD.# -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Remove the obsolete comment, library is licensed under BSD.<commit_after># -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='tomaz@tomaz.me',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
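A note on the version-extraction pattern in the record above: the setup.py regex-captures the __version__ = (...) tuple from face_client/__init__.py and runs it through eval(). A minimal sketch of the same idea using ast.literal_eval, which parses the tuple without executing arbitrary code, follows; the file path and line format are assumptions carried over from the record, not re-verified against the repository.

import ast
import re

version_re = re.compile(r'__version__ = (\(.*?\))')

def read_version(init_path):
    # Scan the module source for a line like: __version__ = (0, 1, 2)
    with open(init_path) as fp:
        for line in fp:
            match = version_re.search(line)
            if match:
                # literal_eval only accepts Python literals, so a corrupted
                # or malicious __init__.py cannot run code here.
                return ast.literal_eval(match.group(1))
    raise Exception('Cannot find version in %s' % init_path)

# Example (path assumed from the record):
# version = read_version('face_client/__init__.py')
# print('.'.join(str(i) for i in version))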
421c28b295b59583eaad1d58de84a6350b883e2c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
"pytz>=2015.2",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
|
Add pytz as a dependency
|
Add pytz as a dependency
|
Python
|
bsd-3-clause
|
thenewguy/django-modelcluster,torchbox/django-modelcluster,wagtail/django-modelcluster,theju/django-modelcluster
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
Add pytz as a dependency
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
"pytz>=2015.2",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
<commit_msg>Add pytz as a dependency<commit_after>
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
"pytz>=2015.2",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
Add pytz as a dependency#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
"pytz>=2015.2",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
<commit_msg>Add pytz as a dependency<commit_after>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='django-modelcluster',
version='0.5',
description="Django extension to allow working with 'clusters' of models as a single unit, independently of the database",
author='Matthew Westcott',
author_email='matthew.westcott@torchbox.com',
url='https://github.com/torchbox/django-modelcluster',
packages=['modelcluster', 'tests'],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
"six>=1.6.1",
"pytz>=2015.2",
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
)
|
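The diff above only declares pytz>=2015.2 in install_requires; the record does not show how django-modelcluster consumes it. As a generic illustration of what the new dependency provides (not code from modelcluster), here is a minimal sketch of attaching a timezone to a naive datetime and normalizing it to UTC; the zone name is arbitrary.

from datetime import datetime
import pytz

tz = pytz.timezone('Europe/London')                   # arbitrary example zone
local_dt = tz.localize(datetime(2015, 6, 1, 12, 0))   # pytz's supported way to build an aware datetime
utc_dt = local_dt.astimezone(pytz.utc)
print(utc_dt.isoformat())                             # 2015-06-01T11:00:00+00:00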
673c88a6ca2eb9917f8e006fa334171ec2f202d2
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.3.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.4.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
|
Add compatibility with Tornado 4.3
|
Add compatibility with Tornado 4.3
|
Python
|
mit
|
globocom/tornado-es,scorphus/tornado-es,scorphus/tornado-es,globocom/tornado-es
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.3.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
Add compatibility with Tornado 4.3
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.4.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.3.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
<commit_msg>Add compatibility with Tornado 4.3<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.4.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.3.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
Add compatibility with Tornado 4.3# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.4.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.3.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
<commit_msg>Add compatibility with Tornado 4.3<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
VERSION = '2.4.1'
setup(
name='tornadoes',
version=VERSION,
description="A tornado-powered python library that provides asynchronous access to elasticsearch.",
long_description="""\
A tornado-powered python library that provides asynchronous access to elasticsearch.""",
author='Team Search of globo.com',
author_email='busca@corp.globo.com',
url='http://github.com/globocom/tornado-es',
download_url='http://github.com/globocom/tornado-es',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'testes']),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Unix',
'Operating System :: OS Independent',
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy",
],
include_package_data=True,
zip_safe=True,
install_requires=[
'tornado>=3.0.0,<4.4.0',
'six>=1.7.3',
],
tests_require=[
'unittest2',
'nose'
],
dependency_links=[],
)
|
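The single functional change in this record is widening the Tornado pin from <4.3.0 to <4.4.0, which admits the 4.3.x series while still excluding 4.4. A quick standalone way to reason about such specifier ranges, using the packaging library (a generic sketch, unrelated to tornado-es itself):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>=3.0.0,<4.4.0')
print('4.2.1' in spec)   # True  -- accepted both before and after this commit
print('4.3.0' in spec)   # True  -- newly accepted by the widened upper bound
print('4.4.0' in spec)   # False -- still excluded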
480095562716bff8efaf3d01fa5b993f84965b63
|
setup.py
|
setup.py
|
# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['mypy', 'pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
Add mypy to the development dependencies.
|
Add mypy to the development dependencies.
The code type-checks clean and this will allow keeping it so.
|
Python
|
lgpl-2.1
|
rosjat/python-scsi
|
# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
Add mypy to the development dependencies.
The code type-checks clean and this will allow keeping it so.
|
# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['mypy', 'pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
<commit_before># coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
<commit_msg>Add mypy to the development dependencies.
The code type-checks clean and this will allow keeping it so.<commit_after>
|
# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['mypy', 'pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
Add mypy to the development dependencies.
The code type-checks clean and this will allow keeping it so.# coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['mypy', 'pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
<commit_before># coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
<commit_msg>Add mypy to the development dependencies.
The code type-checks clean and this will allow keeping it so.<commit_after># coding: utf-8
from setuptools import find_packages, setup
setup(
packages=find_packages(),
python_requires='~=3.7',
extras_require={
'dev': ['mypy', 'pytest'],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
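With mypy added to the dev extra, a contributor would typically install it via pip install -e .[dev] and run mypy over the package; the commit message notes the code already type-checks clean. A tiny generic example of the kind of annotation mypy verifies (invented for illustration, not code from python-scsi):

def scsi_block_to_bytes(block_count: int, block_size: int = 512) -> int:
    """Return the byte length of block_count fixed-size blocks."""
    return block_count * block_size

print(scsi_block_to_bytes(16))  # 8192
# mypy would reject the call below at check time, before it ever runs:
# scsi_block_to_bytes('16')    # error: incompatible type "str"; expected "int"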
935657aa643de17c04ea15ddb8af9aee27a05516
|
setup.py
|
setup.py
|
"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
NOW = datetime.now().strftime("%m/%d/%Y%H%M%S")
VERSION = f"1.0.0-dev{NOW}"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
|
"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
VERSION = "1.0.0"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
|
Set the final version number
|
Set the final version number
|
Python
|
mit
|
rfverbruggen/rachiopy
|
"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
NOW = datetime.now().strftime("%m/%d/%Y%H%M%S")
VERSION = f"1.0.0-dev{NOW}"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
Set the final version number
|
"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
VERSION = "1.0.0"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
|
<commit_before>"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
NOW = datetime.now().strftime("%m/%d/%Y%H%M%S")
VERSION = f"1.0.0-dev{NOW}"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
<commit_msg>Set the final version number<commit_after>
|
"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
VERSION = "1.0.0"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
|
"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
NOW = datetime.now().strftime("%m/%d/%Y%H%M%S")
VERSION = f"1.0.0-dev{NOW}"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
Set the final version number"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
VERSION = "1.0.0"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
|
<commit_before>"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
NOW = datetime.now().strftime("%m/%d/%Y%H%M%S")
VERSION = f"1.0.0-dev{NOW}"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
<commit_msg>Set the final version number<commit_after>"""Rachiopy setup script."""
from setuptools import find_packages, setup
from datetime import datetime
VERSION = "1.0.0"
GITHUB_USERNAME = "rfverbruggen"
GITHUB_REPOSITORY = "rachiopy"
GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz"
PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="RachioPy",
version=VERSION,
author="Robbert Verbruggen",
author_email="rfverbruggen@icloud.com",
packages=PACKAGES,
install_requires=["requests"],
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
license="MIT",
description="A Python module for the Rachio API.",
platforms="Cross Platform",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Topic :: Software Development",
],
)
|
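One detail worth flagging in the removed scheme: strftime("%m/%d/%Y%H%M%S") yields slashes, so the old 1.0.0-dev{NOW} value was not a valid PEP 440 version and would also have mangled the archive download URL built from it. Pinning a plain "1.0.0" sidesteps both problems. For projects that do want timestamped dev builds, a PEP 440-compliant sketch (generic, not from rachiopy):

from datetime import datetime

stamp = datetime.now().strftime('%Y%m%d%H%M%S')   # digits only, no separators
version = '1.0.0.dev' + stamp                     # e.g. 1.0.0.dev20200501120000
print(version)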
e9b0a37ff5233dba3962aebfb1d28d78d4aa49ab
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
|
from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.internal', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
|
Revert "We shouldnt need to export calexicon.internal."
|
Revert "We shouldnt need to export calexicon.internal."
This reverts commit 4b63ae5469b5b1785ce4a6dbbcef4c4ff6505b0b.
We do need to export this package to call it internally.
|
Python
|
apache-2.0
|
jwg4/qual,jwg4/calexicon
|
from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
Revert "We shouldnt need to export calexicon.internal."
This reverts commit 4b63ae5469b5b1785ce4a6dbbcef4c4ff6505b0b.
We do need to export this package to call it internally.
|
from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.internal', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
|
<commit_before>from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
<commit_msg>Revert "We shouldnt need to export calexicon.internal."
This reverts commit 4b63ae5469b5b1785ce4a6dbbcef4c4ff6505b0b.
We do need to export this package to call it internally.<commit_after>
|
from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.internal', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
|
from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
Revert "We shouldnt need to export calexicon.internal."
This reverts commit 4b63ae5469b5b1785ce4a6dbbcef4c4ff6505b0b.
We do need to export this package to call it internally.from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.internal', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
|
<commit_before>from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
<commit_msg>Revert "We shouldnt need to export calexicon.internal."
This reverts commit 4b63ae5469b5b1785ce4a6dbbcef4c4ff6505b0b.
We do need to export this package to call it internally.<commit_after>from setuptools import setup
setup(
name='calexicon',
version='0.1',
description='Calendar stuff',
url='http://github.com/jwg4/calexicon',
author='Jack Grahl',
author_email='jack.grahl@yahoo.co.uk',
license='Apache License 2.0',
packages=['calexicon', 'calexicon.calendars', 'calexicon.internal', 'calexicon.dates', 'calexicon.fn'],
test_suite='nose.collector',
tests_require=['nose', 'hypothesis']
)
|
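The bug fixed here is a hand-maintained packages list that silently dropped calexicon.internal. A common way to avoid that class of omission is setuptools' find_packages(), sketched below; whether this matches calexicon's actual layout (each subdirectory carrying an __init__.py) is an assumption, not something the record confirms.

from setuptools import find_packages

# Discovers calexicon, calexicon.calendars, calexicon.internal,
# calexicon.dates, calexicon.fn, and so on automatically, provided
# each package directory contains an __init__.py.
packages = find_packages(exclude=['tests', 'tests.*'])
print(packages)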
cb0a784f031383d23c1fffc318a6145d9d848a47
|
setup.py
|
setup.py
|
from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
include_package_data=True,
scripts=['bin/azulejo'],
)
|
from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
package_data={
'azulejo': ['*.json'],
},
include_package_data=True,
scripts=['bin/azulejo'],
)
|
Include JSON config files when installing
|
Include JSON config files when installing
|
Python
|
mit
|
gillesB/azulejo,neingeist/azulejo,neingeist/azulejo,gillesB/azulejo
|
from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
include_package_data=True,
scripts=['bin/azulejo'],
)
Include JSON config files when installing
|
from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
package_data={
'azulejo': ['*.json'],
},
include_package_data=True,
scripts=['bin/azulejo'],
)
|
<commit_before>from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
include_package_data=True,
scripts=['bin/azulejo'],
)
<commit_msg>Include JSON config files when installing<commit_after>
|
from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
package_data={
'azulejo': ['*.json'],
},
include_package_data=True,
scripts=['bin/azulejo'],
)
|
from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
include_package_data=True,
scripts=['bin/azulejo'],
)
Include JSON config files when installingfrom setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
package_data={
'azulejo': ['*.json'],
},
include_package_data=True,
scripts=['bin/azulejo'],
)
|
<commit_before>from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
include_package_data=True,
scripts=['bin/azulejo'],
)
<commit_msg>Include JSON config files when installing<commit_after>from setuptools import setup
#see http://pypi.python.org/pypi/stdeb for package building instructions
#or else here: https://github.com/astraw/stdeb
setup(name='azulejo',
version='0.1',
author='Pedro',
author_email='pedro@lamehacks.net',
packages=['azulejo'],
package_data={
'azulejo': ['*.json'],
},
include_package_data=True,
scripts=['bin/azulejo'],
)
|
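Two setuptools mechanisms meet in this diff: package_data declares the *.json glob directly in setup.py, while the pre-existing include_package_data=True pulls in files listed via MANIFEST.in or a VCS plugin. Once the JSON files actually ship, they can be read from the installed package with pkgutil; the filename below is hypothetical, since the record does not name azulejo's config files.

import json
import pkgutil

def load_config(name='layouts.json'):          # filename is made up
    raw = pkgutil.get_data('azulejo', name)    # bytes read from inside the installed package
    if raw is None:
        raise FileNotFoundError(name)
    return json.loads(raw.decode('utf-8'))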
6311df0e55fe234c39cecf6112091e65c1baf52b
|
tnrs.py
|
tnrs.py
|
import sys
import caching
import urllib
import urllib2
import re
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
|
import sys
import caching
import urllib
import urllib2
import re
import json
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
#response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
response_dict = json.loads(response)
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
|
Use json library instead of eval.
|
Use json library instead of eval.
|
Python
|
mit
|
bendmorris/tax_resolve
|
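The substance of this commit is replacing eval(response, {}, {'true': True, ...}) with json.loads(response). Besides dropping the hand-rolled true/false/null mapping, it closes a code-execution hole: eval will run whatever expression the server returns. A standalone comparison with an invented sample payload:

import json

payload = '{"items": [{"nameScientific": "Quercus alba", "matched": true}]}'

# json.loads understands true/false/null natively and never executes code.
data = json.loads(payload)
print(data['items'][0]['nameScientific'])   # Quercus alba

# The eval() route needed the {'true': True, 'false': False, 'null': None}
# namespace and would happily evaluate a hostile payload such as
# "__import__('os').system(...)" if the service ever returned one.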
import sys
import caching
import urllib
import urllib2
import re
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
Use json library instead of eval.
|
import sys
import caching
import urllib
import urllib2
import re
import json
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
#response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
response_dict = json.loads(response)
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
|
<commit_before>import sys
import caching
import urllib
import urllib2
import re
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
<commit_msg>Use json library instead of eval.<commit_after>
|
import sys
import caching
import urllib
import urllib2
import re
import json
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
#response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
response_dict = json.loads(response)
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
|
import sys
import caching
import urllib
import urllib2
import re
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
Use json library instead of eval.import sys
import caching
import urllib
import urllib2
import re
import json
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
#response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
response_dict = json.loads(response)
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
|
<commit_before>import sys
import caching
import urllib
import urllib2
import re
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
<commit_msg>Use json library instead of eval.<commit_after>import sys
import caching
import urllib
import urllib2
import re
import json
from pyquery import PyQuery as p
try: cache = caching.get_cache('tnrs')
except: cache = {}
def tnrs_lookup(name, TIMEOUT=10, CACHE=True):
'''
Look up "name" on the TNRS web service. If a most likely standard name can be identified,
returns that name. Returns False if no or ambiguous result.
'''
name = name.replace("'", '').lower()
if name in cache and CACHE:
return cache[name]
url = "http://tnrs.iplantc.org/tnrsm-svc/matchNames?retrieve=best&names=%s"
# lookup canonical plant names on TNRS web service
try:
response = urllib2.urlopen(url % name.replace(' ', '%20'), timeout=TIMEOUT).read()
#response_dict = eval(response, {}, {'true':True, 'false':False, 'null':None})
response_dict = json.loads(response)
sci_name = response_dict['items'][0]['nameScientific']
if sci_name: result = sci_name
else: result = None
except Exception as e:
print e
result = False
# cache results and return
cache[name] = result
if CACHE: caching.save_cache(cache, 'tnrs')
return result
if __name__=='__main__':
if len(sys.argv) > 1: names = sys.argv[1:]
else: names = [raw_input('species name: ')]
for name in names:
print name, '->', tnrs_lookup(name)
|
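The commit above replaces eval with json.loads when parsing the TNRS response. A minimal sketch of why that substitution matters, using hypothetical payloads (illustrative only, not part of the dataset):

import json

# A well-formed response parses as plain data.
safe_payload = '{"items": [{"nameScientific": "Quercus alba"}]}'
print(json.loads(safe_payload)['items'][0]['nameScientific'])

# eval would execute an expression smuggled into the response body;
# json.loads rejects anything that is not literal JSON.
malicious_payload = "__import__('os').system('echo pwned')"
try:
    json.loads(malicious_payload)
except ValueError as exc:
    print('rejected non-JSON input:', exc)

json.loads also maps true/false/null to True/False/None natively, which is why the {'true': True, 'false': False, 'null': None} namespace workaround disappears from the new code.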
4a4eca6fb920d7ba50e97a5bcb0ae8161715ff7a
|
citenet/neighborrank.py
|
citenet/neighborrank.py
|
import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest indegree (most often cited).
nodes = util.top_n_from_dict(graph.in_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.successors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
|
import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest outdegree (most often cited).
nodes = util.top_n_from_dict(graph.out_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.predecessors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
|
Switch in/out degree for neighbor rank
|
Switch in/out degree for neighbor rank
Edges point in the direction of time, or influence. That means we're
concerned with outdegree (amount of nodes influenced by the current
node), not indegree (amount of nodes that influence the current node).
|
Python
|
mit
|
Pringley/citenet
|
import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest indegree (most often cited).
nodes = util.top_n_from_dict(graph.in_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.successors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
Switch in/out degree for neighbor rank
Edges point in the direction of time, or influence. That means we're
concerned with outdegree (amount of nodes influenced by the current
node), not indegree (amount of nodes that influence the current node).
|
import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest outdegree (most often cited).
nodes = util.top_n_from_dict(graph.out_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.predecessors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
|
<commit_before>import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest indegree (most often cited).
nodes = util.top_n_from_dict(graph.in_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.successors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
<commit_msg>Switch in/out degree for neighbor rank
Edges point in the direction of time, or influence. That means we're
concerned with outdegree (amount of nodes influenced by the current
node), not indegree (amount of nodes that influence the current node).<commit_after>
|
import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest outdegree (most often cited).
nodes = util.top_n_from_dict(graph.out_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.predecessors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
|
import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest indegree (most often cited).
nodes = util.top_n_from_dict(graph.in_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.successors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
Switch in/out degree for neighbor rank
Edges point in the direction of time, or influence. That means we're
concerned with outdegree (amount of nodes influenced by the current
node), not indegree (amount of nodes that influence the current node).import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest outdegree (most often cited).
nodes = util.top_n_from_dict(graph.out_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.predecessors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
|
<commit_before>import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest indegree (most often cited).
nodes = util.top_n_from_dict(graph.in_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.successors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
<commit_msg>Switch in/out degree for neighbor rank
Edges point in the direction of time, or influence. That means we're
concerned with outdegree (amount of nodes influenced by the current
node), not indegree (amount of nodes that influence the current node).<commit_after>import networkx as nx
import util
def neighborrank(graph, n=100, neighborhood_depth=2):
"""Compute the NeighborRank of the top n nodes in graph, using the
specified neighborhood_depth."""
# Get top n nodes with highest outdegree (most often cited).
nodes = util.top_n_from_dict(graph.out_degree(), n=n)
# Find neighborhood sizes.
nhood_sizes = {}
for root in nodes:
# Neighborhood begins with just the root.
nhood = set([root])
# Expand the neighborhood repeatedly until the depth is reached.
for i in range(neighborhood_depth):
prev_nhood = nhood.copy()
for node in prev_nhood:
nhood |= set(graph.predecessors(node))
# Update the results dict.
nhood_sizes[root] = len(nhood)
return nhood_sizes
|
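The commit message above turns on edge orientation: if citation edges point in the direction of influence, a heavily cited paper has a large out-degree, and predecessors() walks against the edges, from a paper back to whatever influenced it. A minimal sketch with a made-up three-paper graph (illustrative only, not from the dataset):

import networkx as nx

# Toy graph: edges point in the direction of influence, so 'classic'
# influences the three later papers (i.e. they all cite it).
g = nx.DiGraph()
g.add_edges_from([
    ('classic', 'paper_a'),
    ('classic', 'paper_b'),
    ('classic', 'paper_c'),
])

# Under this orientation the influential node has out-degree 3 and
# in-degree 0, so ranking by in_degree would miss it entirely.
print(g.out_degree('classic'))  # 3
print(g.in_degree('classic'))   # 0

# Walking backwards from a later paper to what influenced it uses
# predecessors(), matching the corrected neighborhood expansion.
print(set(g.predecessors('paper_a')))  # {'classic'}

In networkx 2.x, DiGraph.out_degree(node) returns an int for a single node and predecessors() returns an iterator, which is why the sketch wraps it in set() for display.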