commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
de02c92510a6117aad01be7666d737d2ad861fd7 | send_sms.py | send_sms.py | #!/usr/bin/env python
import datetime
import json
import sys
import requests
import pytz
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
data = json.loads(request.text)
# Set to epoch begin just in case this is a totally new account
latest_contribution = datetime.datetime(1970, 1, 1, 0, 0)
# Get data for the latest contribution
for i in reversed(data):
if i[1] > 0:
latest_contribution = datetime.datetime.strptime(i[0], '%Y/%m/%d')
break
# Find out today's date in PST (since Github uses PST)
today = datetime.datetime.now(pytz.timezone('US/Pacific'))
# Haven't contributed anything today?
if latest_contribution.date() < today.date():
send(message)
else:
send('There was a problem accessing the Github API :(')
| #!/usr/bin/env python
import json
import sys
import requests
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
try:
data = json.loads(request.text)
# Get the number of commits made today
commits_today = data[-1][1]
if not commits_today:
send(message)
except:
send('There was an error getting the number of commits today')
else:
send('There was a problem accessing the Github API :(')
| Improve logic in determining latest contribution | Improve logic in determining latest contribution
Looks like the list will always contain data for the last 366 days, and
the last entry in the list will always contain data for the current day
(PST). Much simpler this way.
Added some generic error-handling just in case this isn't true.
| Python | mit | dellsystem/github-streak-saver | #!/usr/bin/env python
import datetime
import json
import sys
import requests
import pytz
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
data = json.loads(request.text)
# Set to epoch begin just in case this is a totally new account
latest_contribution = datetime.datetime(1970, 1, 1, 0, 0)
# Get data for the latest contribution
for i in reversed(data):
if i[1] > 0:
latest_contribution = datetime.datetime.strptime(i[0], '%Y/%m/%d')
break
# Find out today's date in PST (since Github uses PST)
today = datetime.datetime.now(pytz.timezone('US/Pacific'))
# Haven't contributed anything today?
if latest_contribution.date() < today.date():
send(message)
else:
send('There was a problem accessing the Github API :(')
Improve logic in determining latest contribution
Looks like the list will always contain data for the last 366 days, and
the last entry in the list will always contain data for the current day
(PST). Much simpler this way.
Added some generic error-handling just in case this isn't true. | #!/usr/bin/env python
import json
import sys
import requests
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
try:
data = json.loads(request.text)
# Get the number of commits made today
commits_today = data[-1][1]
if not commits_today:
send(message)
except:
send('There was an error getting the number of commits today')
else:
send('There was a problem accessing the Github API :(')
| <commit_before>#!/usr/bin/env python
import datetime
import json
import sys
import requests
import pytz
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
data = json.loads(request.text)
# Set to epoch begin just in case this is a totally new account
latest_contribution = datetime.datetime(1970, 1, 1, 0, 0)
# Get data for the latest contribution
for i in reversed(data):
if i[1] > 0:
latest_contribution = datetime.datetime.strptime(i[0], '%Y/%m/%d')
break
# Find out today's date in PST (since Github uses PST)
today = datetime.datetime.now(pytz.timezone('US/Pacific'))
# Haven't contributed anything today?
if latest_contribution.date() < today.date():
send(message)
else:
send('There was a problem accessing the Github API :(')
<commit_msg>Improve logic in determining latest contribution
Looks like the list will always contain data for the last 366 days, and
the last entry in the list will always contain data for the current day
(PST). Much simpler this way.
Added some generic error-handling just in case this isn't true.<commit_after> | #!/usr/bin/env python
import json
import sys
import requests
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
try:
data = json.loads(request.text)
# Get the number of commits made today
commits_today = data[-1][1]
if not commits_today:
send(message)
except:
send('There was an error getting the number of commits today')
else:
send('There was a problem accessing the Github API :(')
| #!/usr/bin/env python
import datetime
import json
import sys
import requests
import pytz
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
data = json.loads(request.text)
# Set to epoch begin just in case this is a totally new account
latest_contribution = datetime.datetime(1970, 1, 1, 0, 0)
# Get data for the latest contribution
for i in reversed(data):
if i[1] > 0:
latest_contribution = datetime.datetime.strptime(i[0], '%Y/%m/%d')
break
# Find out today's date in PST (since Github uses PST)
today = datetime.datetime.now(pytz.timezone('US/Pacific'))
# Haven't contributed anything today?
if latest_contribution.date() < today.date():
send(message)
else:
send('There was a problem accessing the Github API :(')
Improve logic in determining latest contribution
Looks like the list will always contain data for the last 366 days, and
the last entry in the list will always contain data for the current day
(PST). Much simpler this way.
Added some generic error-handling just in case this isn't true.#!/usr/bin/env python
import json
import sys
import requests
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
try:
data = json.loads(request.text)
# Get the number of commits made today
commits_today = data[-1][1]
if not commits_today:
send(message)
except:
send('There was an error getting the number of commits today')
else:
send('There was a problem accessing the Github API :(')
| <commit_before>#!/usr/bin/env python
import datetime
import json
import sys
import requests
import pytz
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
data = json.loads(request.text)
# Set to epoch begin just in case this is a totally new account
latest_contribution = datetime.datetime(1970, 1, 1, 0, 0)
# Get data for the latest contribution
for i in reversed(data):
if i[1] > 0:
latest_contribution = datetime.datetime.strptime(i[0], '%Y/%m/%d')
break
# Find out today's date in PST (since Github uses PST)
today = datetime.datetime.now(pytz.timezone('US/Pacific'))
# Haven't contributed anything today?
if latest_contribution.date() < today.date():
send(message)
else:
send('There was a problem accessing the Github API :(')
<commit_msg>Improve logic in determining latest contribution
Looks like the list will always contain data for the last 366 days, and
the last entry in the list will always contain data for the current day
(PST). Much simpler this way.
Added some generic error-handling just in case this isn't true.<commit_after>#!/usr/bin/env python
import json
import sys
import requests
from twilio.rest import TwilioRestClient
import conf
def send(s):
client.sms.messages.create(to=conf.TO, from_=conf.FROM, body=s)
# Use the first arg as the message to send, or use the default if not specified
default_message = "You haven't committed anything today!"
message = sys.argv[1] if len(sys.argv) > 1 else default_message
# Initialise twilio stuff
client = TwilioRestClient(conf.ACCOUNT_SID, conf.AUTH_TOKEN)
# Get Github contributions activity
url = 'https://github.com/users/%s/contributions_calendar_data' % conf.USERNAME
request = requests.get(url)
if request.ok:
try:
data = json.loads(request.text)
# Get the number of commits made today
commits_today = data[-1][1]
if not commits_today:
send(message)
except:
send('There was an error getting the number of commits today')
else:
send('There was a problem accessing the Github API :(')
|
67913476c33a4f8b3635f63c379bde0a48a5e714 | admin.py | admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
| Disable date_hierarchy for now since it requires tzinfo in MySQL | Disable date_hierarchy for now since it requires tzinfo in MySQL
| Python | mit | mback2k/django-app-bugs | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
Disable date_hierarchy for now since it requires tzinfo in MySQL | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
| <commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
<commit_msg>Disable date_hierarchy for now since it requires tzinfo in MySQL<commit_after> | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
Disable date_hierarchy for now since it requires tzinfo in MySQL# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
| <commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
<commit_msg>Disable date_hierarchy for now since it requires tzinfo in MySQL<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Crash
class CrashAdmin(admin.ModelAdmin):
search_fields = ('report',)
list_display = ('application', 'build',
'crdate', 'tstamp',
'is_solved', 'is_obsolete')
list_filter = ('application', 'build',
'is_solved', 'is_obsolete',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Crash, CrashAdmin)
|
5b18ec2219cbdfa479a1d32f9bf62f7460171f09 | live_studio/queue/models.py | live_studio/queue/models.py | import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
| import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
verbose_name_plural = 'Entries'
| Set verbose_name_plural properly for queue.Entry. | Set verbose_name_plural properly for queue.Entry.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
| Python | agpl-3.0 | debian-live/live-studio,debian-live/live-studio,lamby/live-studio,lamby/live-studio,lamby/live-studio,debian-live/live-studio | import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
Set verbose_name_plural properly for queue.Entry.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org> | import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
verbose_name_plural = 'Entries'
| <commit_before>import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
<commit_msg>Set verbose_name_plural properly for queue.Entry.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org><commit_after> | import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
verbose_name_plural = 'Entries'
| import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
Set verbose_name_plural properly for queue.Entry.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
verbose_name_plural = 'Entries'
| <commit_before>import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
<commit_msg>Set verbose_name_plural properly for queue.Entry.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org><commit_after>import datetime
from django.db import models
from .managers import EntryManager
class Entry(models.Model):
config = models.ForeignKey('config.Config')
enqueued = models.DateTimeField(default=datetime.datetime.utcnow)
started = models.DateTimeField(null=True)
finished = models.DateTimeField(null=True)
success = models.BooleanField(default=False)
objects = EntryManager()
class Meta:
ordering = ('-enqueued',)
verbose_name_plural = 'Entries'
|
0f183708aa8a0c9503d847a65f072de07dc800ea | tests.py | tests.py | import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
if __name__ == '__main__':
unittest.main()
| import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
def test_args_maximize(self):
options = MockOptions()
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
| Check for maximize in args. | Check for maximize in args.
| Python | mit | GoldenLine/gtlaunch | import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
if __name__ == '__main__':
unittest.main()
Check for maximize in args. | import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
def test_args_maximize(self):
options = MockOptions()
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
if __name__ == '__main__':
unittest.main()
<commit_msg>Check for maximize in args.<commit_after> | import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
def test_args_maximize(self):
options = MockOptions()
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
| import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
if __name__ == '__main__':
unittest.main()
Check for maximize in args.import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
def test_args_maximize(self):
options = MockOptions()
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
if __name__ == '__main__':
unittest.main()
<commit_msg>Check for maximize in args.<commit_after>import unittest
from gtlaunch import Launcher
class MockOptions(object):
def __init__(self):
self.verbose = False
self.config = ''
self.project = ''
class LauncherTestCase(unittest.TestCase):
def test_lazy_init(self):
options = MockOptions()
launcher = Launcher(options, lazy=True)
self.assertIsNone(launcher.project)
def test_args_maximize(self):
options = MockOptions()
project = {
'cwd': '~',
'tabs': [],
}
launcher = Launcher(options, lazy=True)
args = launcher.build_args(project)
self.assertIn('--maximize', args)
if __name__ == '__main__':
unittest.main()
|
e055874545dcc0e1205bad2b419076c204ffcf9c | duty_cycle.py | duty_cycle.py | #!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
setup(pin)
up = range(10)
down = range(9)
down.reverse()
bpm = 70
period = 60.0 / bpm # seconds
time_per_level = period / len(up + down)
for i in range(10):
for j in (up + down):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
| #!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
bpm = 70
setup(pin)
up = range(10)
down = range(9)
down.reverse()
spectrum = up + down + [-1]
period = 60.0 / bpm # seconds
time_per_level = period / len(spectrum)
for i in range(10):
for j in (spectrum):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
| Add a 0.0 duty cycle: brief moment of off time. | Add a 0.0 duty cycle: brief moment of off time.
| Python | mit | zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie | #!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
setup(pin)
up = range(10)
down = range(9)
down.reverse()
bpm = 70
period = 60.0 / bpm # seconds
time_per_level = period / len(up + down)
for i in range(10):
for j in (up + down):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
Add a 0.0 duty cycle: brief moment of off time. | #!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
bpm = 70
setup(pin)
up = range(10)
down = range(9)
down.reverse()
spectrum = up + down + [-1]
period = 60.0 / bpm # seconds
time_per_level = period / len(spectrum)
for i in range(10):
for j in (spectrum):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
| <commit_before>#!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
setup(pin)
up = range(10)
down = range(9)
down.reverse()
bpm = 70
period = 60.0 / bpm # seconds
time_per_level = period / len(up + down)
for i in range(10):
for j in (up + down):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
<commit_msg>Add a 0.0 duty cycle: brief moment of off time.<commit_after> | #!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
bpm = 70
setup(pin)
up = range(10)
down = range(9)
down.reverse()
spectrum = up + down + [-1]
period = 60.0 / bpm # seconds
time_per_level = period / len(spectrum)
for i in range(10):
for j in (spectrum):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
| #!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
setup(pin)
up = range(10)
down = range(9)
down.reverse()
bpm = 70
period = 60.0 / bpm # seconds
time_per_level = period / len(up + down)
for i in range(10):
for j in (up + down):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
Add a 0.0 duty cycle: brief moment of off time.#!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
bpm = 70
setup(pin)
up = range(10)
down = range(9)
down.reverse()
spectrum = up + down + [-1]
period = 60.0 / bpm # seconds
time_per_level = period / len(spectrum)
for i in range(10):
for j in (spectrum):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
| <commit_before>#!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
setup(pin)
up = range(10)
down = range(9)
down.reverse()
bpm = 70
period = 60.0 / bpm # seconds
time_per_level = period / len(up + down)
for i in range(10):
for j in (up + down):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
<commit_msg>Add a 0.0 duty cycle: brief moment of off time.<commit_after>#!/usr/bin/env python
from blinkenlights import dimmer, setup, cleanup
pin = 18
bpm = 70
setup(pin)
up = range(10)
down = range(9)
down.reverse()
spectrum = up + down + [-1]
period = 60.0 / bpm # seconds
time_per_level = period / len(spectrum)
for i in range(10):
for j in (spectrum):
brightness = (j+1) / 10.0
dimmer(brightness, time_per_level, pin)
cleanup()
|
0e740b5fd924b113173b546f2dd2b8fa1e55d074 | indra/sparser/sparser_api.py | indra/sparser/sparser_api.py | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError:
logger.error('Could not parse XML string')
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
| from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError as e:
logger.error('Could not parse XML string')
logger.error(e)
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
| Print XML parse errors in Sparser API | Print XML parse errors in Sparser API
| Python | bsd-2-clause | sorgerlab/belpy,bgyori/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError:
logger.error('Could not parse XML string')
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
Print XML parse errors in Sparser API | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError as e:
logger.error('Could not parse XML string')
logger.error(e)
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
| <commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError:
logger.error('Could not parse XML string')
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
<commit_msg>Print XML parse errors in Sparser API<commit_after> | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError as e:
logger.error('Could not parse XML string')
logger.error(e)
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
| from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError:
logger.error('Could not parse XML string')
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
Print XML parse errors in Sparser APIfrom __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError as e:
logger.error('Could not parse XML string')
logger.error(e)
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
| <commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError:
logger.error('Could not parse XML string')
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
<commit_msg>Print XML parse errors in Sparser API<commit_after>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor
logger = logging.getLogger('sparser')
def process_xml(xml_str):
try:
tree = ET.XML(xml_str, parser=UTB())
except ET.ParseError as e:
logger.error('Could not parse XML string')
logger.error(e)
return None
sp = _process_elementtree(tree)
return sp
def _process_elementtree(tree):
sp = SparserProcessor(tree)
sp.get_modifications()
sp.get_activations()
return sp
|
0a850f935ce6cc48a68cffbef64c127daa22a42f | write.py | write.py | import colour
import csv
import json
import os
import pprint
# write to file as json, csv, markdown, plaintext or print table
def write_data(data, user, format=None):
if format is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
f = open(directory + user + '.' + format, 'w')
if format == 'json':
f.write(json.dumps(data, indent=4))
elif format == 'csv':
keys = data[0].keys()
dw = csv.DictWriter(f, fieldnames=keys)
dw.writeheader()
dw.writerows(data)
elif format == 'md':
f.write('## %s - GitHub repositories\n' % user)
for row in data:
f.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif format == 'txt':
f.write('%s - GitHub repositories\n\n' % user)
for row in data:
f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
f.close()
| import csv
import json
import os
from tabulate import tabulate
def write_data(d, u, f=None):
if f is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
file = open(directory + u + '.' + f, 'w')
if f == 'json':
file.write(json.dumps(d, indent=4))
elif f == 'csv':
keys = d[0].keys()
dw = csv.DictWriter(file, fieldnames=keys)
dw.writeheader()
dw.writerows(d)
elif f == 'md':
file.write('## %s - GitHub repositories\n' % u)
for row in d:
file.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif f == 'txt':
file.write('%s - GitHub repositories\n\n' % u)
for row in d:
file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
file.close()
else:
print(tabulate(d, headers="keys"))
| Print table if no file format provided | Print table if no file format provided
| Python | mit | kshvmdn/github-list,kshvmdn/github-list,kshvmdn/github-list | import colour
import csv
import json
import os
import pprint
# write to file as json, csv, markdown, plaintext or print table
def write_data(data, user, format=None):
if format is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
f = open(directory + user + '.' + format, 'w')
if format == 'json':
f.write(json.dumps(data, indent=4))
elif format == 'csv':
keys = data[0].keys()
dw = csv.DictWriter(f, fieldnames=keys)
dw.writeheader()
dw.writerows(data)
elif format == 'md':
f.write('## %s - GitHub repositories\n' % user)
for row in data:
f.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif format == 'txt':
f.write('%s - GitHub repositories\n\n' % user)
for row in data:
f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
f.close()
Print table if no file format provided | import csv
import json
import os
from tabulate import tabulate
def write_data(d, u, f=None):
if f is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
file = open(directory + u + '.' + f, 'w')
if f == 'json':
file.write(json.dumps(d, indent=4))
elif f == 'csv':
keys = d[0].keys()
dw = csv.DictWriter(file, fieldnames=keys)
dw.writeheader()
dw.writerows(d)
elif f == 'md':
file.write('## %s - GitHub repositories\n' % u)
for row in d:
file.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif f == 'txt':
file.write('%s - GitHub repositories\n\n' % u)
for row in d:
file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
file.close()
else:
print(tabulate(d, headers="keys"))
| <commit_before>import colour
import csv
import json
import os
import pprint
# write to file as json, csv, markdown, plaintext or print table
def write_data(data, user, format=None):
if format is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
f = open(directory + user + '.' + format, 'w')
if format == 'json':
f.write(json.dumps(data, indent=4))
elif format == 'csv':
keys = data[0].keys()
dw = csv.DictWriter(f, fieldnames=keys)
dw.writeheader()
dw.writerows(data)
elif format == 'md':
f.write('## %s - GitHub repositories\n' % user)
for row in data:
f.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif format == 'txt':
f.write('%s - GitHub repositories\n\n' % user)
for row in data:
f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
f.close()
<commit_msg>Print table if no file format provided<commit_after> | import csv
import json
import os
from tabulate import tabulate
def write_data(d, u, f=None):
if f is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
file = open(directory + u + '.' + f, 'w')
if f == 'json':
file.write(json.dumps(d, indent=4))
elif f == 'csv':
keys = d[0].keys()
dw = csv.DictWriter(file, fieldnames=keys)
dw.writeheader()
dw.writerows(d)
elif f == 'md':
file.write('## %s - GitHub repositories\n' % u)
for row in d:
file.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif f == 'txt':
file.write('%s - GitHub repositories\n\n' % u)
for row in d:
file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
file.close()
else:
print(tabulate(d, headers="keys"))
| import colour
import csv
import json
import os
import pprint
# write to file as json, csv, markdown, plaintext or print table
def write_data(data, user, format=None):
if format is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
f = open(directory + user + '.' + format, 'w')
if format == 'json':
f.write(json.dumps(data, indent=4))
elif format == 'csv':
keys = data[0].keys()
dw = csv.DictWriter(f, fieldnames=keys)
dw.writeheader()
dw.writerows(data)
elif format == 'md':
f.write('## %s - GitHub repositories\n' % user)
for row in data:
f.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif format == 'txt':
f.write('%s - GitHub repositories\n\n' % user)
for row in data:
f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
f.close()
Print table if no file format providedimport csv
import json
import os
from tabulate import tabulate
def write_data(d, u, f=None):
if f is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
file = open(directory + u + '.' + f, 'w')
if f == 'json':
file.write(json.dumps(d, indent=4))
elif f == 'csv':
keys = d[0].keys()
dw = csv.DictWriter(file, fieldnames=keys)
dw.writeheader()
dw.writerows(d)
elif f == 'md':
file.write('## %s - GitHub repositories\n' % u)
for row in d:
file.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif f == 'txt':
file.write('%s - GitHub repositories\n\n' % u)
for row in d:
file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
file.close()
else:
print(tabulate(d, headers="keys"))
| <commit_before>import colour
import csv
import json
import os
import pprint
# write to file as json, csv, markdown, plaintext or print table
def write_data(data, user, format=None):
if format is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
f = open(directory + user + '.' + format, 'w')
if format == 'json':
f.write(json.dumps(data, indent=4))
elif format == 'csv':
keys = data[0].keys()
dw = csv.DictWriter(f, fieldnames=keys)
dw.writeheader()
dw.writerows(data)
elif format == 'md':
f.write('## %s - GitHub repositories\n' % user)
for row in data:
f.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif format == 'txt':
f.write('%s - GitHub repositories\n\n' % user)
for row in data:
f.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
f.close()
<commit_msg>Print table if no file format provided<commit_after>import csv
import json
import os
from tabulate import tabulate
def write_data(d, u, f=None):
if f is not None:
directory = './data/'
if not os.path.exists(directory):
os.makedirs(directory)
file = open(directory + u + '.' + f, 'w')
if f == 'json':
file.write(json.dumps(d, indent=4))
elif f == 'csv':
keys = d[0].keys()
dw = csv.DictWriter(file, fieldnames=keys)
dw.writeheader()
dw.writerows(d)
elif f == 'md':
file.write('## %s - GitHub repositories\n' % u)
for row in d:
file.write(
'#### {}\n\n{} \n_{}_, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
elif f == 'txt':
file.write('%s - GitHub repositories\n\n' % u)
for row in d:
file.write('{}\n{}\n{}, {} star(s)\n\n'.format(row['name'],
row['desc'],
row['lang'],
row['stars']))
file.close()
else:
print(tabulate(d, headers="keys"))
|
991889003ca31bf13b326b7c1788ecbe32801528 | profile_collection/startup/99-bluesky.py | profile_collection/startup/99-bluesky.py | from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
| from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
gs.DETS.append(det4)
#from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
| Add det4 to global dets | Add det4 to global dets
| Python | bsd-2-clause | NSLS-II-IXS/ipython_ophyd,NSLS-II-IXS/ipython_ophyd | from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
Add det4 to global dets | from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
gs.DETS.append(det4)
#from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
| <commit_before>from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
<commit_msg>Add det4 to global dets<commit_after> | from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
gs.DETS.append(det4)
#from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
| from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
Add det4 to global detsfrom bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
gs.DETS.append(det4)
#from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
| <commit_before>from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
<commit_msg>Add det4 to global dets<commit_after>from bluesky.global_state import (resume, abort, stop, panic, all_is_well,
state)
from bluesky.callbacks.olog import OlogCallback
from bluesky.global_state import gs
olog_cb = OlogCallback('Data Acquisition')
gs.RE.subscribe('start', olog_cb)
gs.DETS.append(det4)
#from bluesky.scientific_callbacks import plot_peak_stats
from bluesky.plans import *
from bluesky.spec_api import ct, ascan, dscan
|
b65c5157c9e4515b01558201b983727d3a3154bd | src/syntax/relative_clauses.py | src/syntax/relative_clauses.py | __author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node) | __author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node) | Fix detection of relative clause | Fix detection of relative clause
| Python | mit | Somsubhra/Simplify,Somsubhra/Simplify,Somsubhra/Simplify | __author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node)Fix detection of relative clause | __author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node) | <commit_before>__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node)<commit_msg>Fix detection of relative clause<commit_after> | __author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node) | __author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node)Fix detection of relative clause__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node) | <commit_before>__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node)<commit_msg>Fix detection of relative clause<commit_after>__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node) |
02e4a051e6e463d06195e9efe6a25c84cc046b55 | tests/base.py | tests/base.py | import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| Add authorization and content-type headers to request for tests | [CHORE] Add authorization and content-type headers to request for tests
| Python | mit | brayoh/bucket-list-api | import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
[CHORE] Add authorization and content-type headers to request for tests | import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| <commit_before>import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
<commit_msg>[CHORE] Add authorization and content-type headers to request for tests<commit_after> | import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
[CHORE] Add authorization and content-type headers to request for testsimport unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| <commit_before>import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
<commit_msg>[CHORE] Add authorization and content-type headers to request for tests<commit_after>import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
|
6ad77e5a9cdbe63ca706bd7c7d3aebb7a34e4cc5 | mopidy/__init__.py | mopidy/__init__.py | from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
compatible_py2 = (2, 7) <= sys.version_info < (3,)
compatible_py3 = (3, 7) <= sys.version_info
if not (compatible_py2 or compatible_py3):
sys.exit(
'ERROR: Mopidy requires Python 2.7 or >=3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
| from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
if not sys.version_info >= (3, 7):
sys.exit(
'ERROR: Mopidy requires Python >= 3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
| Exit if imported on Python 2 | Exit if imported on Python 2
| Python | apache-2.0 | kingosticks/mopidy,adamcik/mopidy,jcass77/mopidy,jodal/mopidy,mopidy/mopidy,kingosticks/mopidy,mopidy/mopidy,adamcik/mopidy,jcass77/mopidy,kingosticks/mopidy,jodal/mopidy,jodal/mopidy,mopidy/mopidy,jcass77/mopidy,adamcik/mopidy | from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
compatible_py2 = (2, 7) <= sys.version_info < (3,)
compatible_py3 = (3, 7) <= sys.version_info
if not (compatible_py2 or compatible_py3):
sys.exit(
'ERROR: Mopidy requires Python 2.7 or >=3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
Exit if imported on Python 2 | from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
if not sys.version_info >= (3, 7):
sys.exit(
'ERROR: Mopidy requires Python >= 3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
| <commit_before>from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
compatible_py2 = (2, 7) <= sys.version_info < (3,)
compatible_py3 = (3, 7) <= sys.version_info
if not (compatible_py2 or compatible_py3):
sys.exit(
'ERROR: Mopidy requires Python 2.7 or >=3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
<commit_msg>Exit if imported on Python 2<commit_after> | from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
if not sys.version_info >= (3, 7):
sys.exit(
'ERROR: Mopidy requires Python >= 3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
| from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
compatible_py2 = (2, 7) <= sys.version_info < (3,)
compatible_py3 = (3, 7) <= sys.version_info
if not (compatible_py2 or compatible_py3):
sys.exit(
'ERROR: Mopidy requires Python 2.7 or >=3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
Exit if imported on Python 2from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
if not sys.version_info >= (3, 7):
sys.exit(
'ERROR: Mopidy requires Python >= 3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
| <commit_before>from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
compatible_py2 = (2, 7) <= sys.version_info < (3,)
compatible_py3 = (3, 7) <= sys.version_info
if not (compatible_py2 or compatible_py3):
sys.exit(
'ERROR: Mopidy requires Python 2.7 or >=3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
<commit_msg>Exit if imported on Python 2<commit_after>from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import warnings
if not sys.version_info >= (3, 7):
sys.exit(
'ERROR: Mopidy requires Python >= 3.7, but found %s.' %
platform.python_version())
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '3.0.0a2'
|
6f0454669be842309d1c31deee9c377d9c6ffff5 | lightstep/http_connection.py | lightstep/http_connection.py | """ Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": report.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
| """ Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": auth.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
| Add access token from auth not report | Add access token from auth not report
| Python | mit | lightstephq/lightstep-tracer-python | """ Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": report.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
Add access token from auth not report | """ Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": auth.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
| <commit_before>""" Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": report.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
<commit_msg>Add access token from auth not report<commit_after> | """ Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": auth.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
| """ Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": report.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
Add access token from auth not report""" Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": auth.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
| <commit_before>""" Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": report.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
<commit_msg>Add access token from auth not report<commit_after>""" Connection class establishes HTTP connection with server.
Utilized to send Proto Report Requests.
"""
import threading
import requests
from lightstep.collector_pb2 import ReportResponse
class _HTTPConnection(object):
"""Instances of _Connection are used to establish a connection to the
server via HTTP protocol.
"""
def __init__(self, collector_url, timeout_seconds):
self._collector_url = collector_url
self._lock = threading.Lock()
self.ready = True
self._timeout_seconds = timeout_seconds
def open(self):
"""Establish HTTP connection to the server.
"""
pass
# May throw an Exception on failure.
def report(self, *args, **kwargs):
"""Report to the server."""
auth = args[0]
report = args[1]
with self._lock:
try:
report.auth.access_token = auth.access_token
headers = {"Content-Type": "application/octet-stream",
"Accept": "application/octet-stream",
"Lightstep-Access-Token": auth.access_token}
r = requests.post(
self._collector_url,
headers=headers,
data=report.SerializeToString(),
timeout=self._timeout_seconds)
resp = ReportResponse()
resp.ParseFromString(r.content)
return resp
except requests.exceptions.RequestException as err:
raise err
def close(self):
"""Close HTTP connection to the server."""
self.ready = False
pass
|
ce9cbc4144c105e9cb59836274ef25a29a9b20a7 | webserver/codemanagement/tasks.py | webserver/codemanagement/tasks.py | from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
# Create a submission for the HEAD commit
TeamSubmission.objects.create(team=instance.team,
commit=instance.repository.repo['HEAD'].id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
| from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
try:
commit = instance.repository.repo['HEAD']
except KeyError:
# Log an error if we can't get a commit
msg = "Unable to tag {}'s repo. Bad ref 'HEAD'. Is the repo empty?"
logger.error(msg.format(team_name))
else:
# Create a submission for the HEAD commit
TeamSubmission.objects.create(teamclient=instance,
commit=commit.id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
| Handle attempts to tag empty shell repos | Handle attempts to tag empty shell repos
| Python | bsd-3-clause | siggame/webserver,siggame/webserver,siggame/webserver | from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
# Create a submission for the HEAD commit
TeamSubmission.objects.create(team=instance.team,
commit=instance.repository.repo['HEAD'].id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
Handle attempts to tag empty shell repos | from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
try:
commit = instance.repository.repo['HEAD']
except KeyError:
# Log an error if we can't get a commit
msg = "Unable to tag {}'s repo. Bad ref 'HEAD'. Is the repo empty?"
logger.error(msg.format(team_name))
else:
# Create a submission for the HEAD commit
TeamSubmission.objects.create(teamclient=instance,
commit=commit.id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
| <commit_before>from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
# Create a submission for the HEAD commit
TeamSubmission.objects.create(team=instance.team,
commit=instance.repository.repo['HEAD'].id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
<commit_msg>Handle attempts to tag empty shell repos<commit_after> | from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
try:
commit = instance.repository.repo['HEAD']
except KeyError:
# Log an error if we can't get a commit
msg = "Unable to tag {}'s repo. Bad ref 'HEAD'. Is the repo empty?"
logger.error(msg.format(team_name))
else:
# Create a submission for the HEAD commit
TeamSubmission.objects.create(teamclient=instance,
commit=commit.id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
| from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
# Create a submission for the HEAD commit
TeamSubmission.objects.create(team=instance.team,
commit=instance.repository.repo['HEAD'].id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
Handle attempts to tag empty shell reposfrom celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
try:
commit = instance.repository.repo['HEAD']
except KeyError:
# Log an error if we can't get a commit
msg = "Unable to tag {}'s repo. Bad ref 'HEAD'. Is the repo empty?"
logger.error(msg.format(team_name))
else:
# Create a submission for the HEAD commit
TeamSubmission.objects.create(teamclient=instance,
commit=commit.id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
| <commit_before>from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
# Create a submission for the HEAD commit
TeamSubmission.objects.create(team=instance.team,
commit=instance.repository.repo['HEAD'].id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
<commit_msg>Handle attempts to tag empty shell repos<commit_after>from celery import task
from celery.result import AsyncResult
from .models import TeamSubmission
import logging
logger = logging.getLogger(__name__)
@task()
def create_shellai_tag(instance):
"""Tags the repo's HEAD as "ShellAI" to provide a default tag for
the arena to use"""
team_name = instance.team.name
if instance.repository.task_id is not None:
# Wait for the repo to be created
AsyncResult(instance.repository.task_id).wait()
msg = "Waiting for {}'s repository to be created..."
logger.info(msg.format(team_name))
logger.info("{}'s repository is ready".format(team_name))
try:
commit = instance.repository.repo['HEAD']
except KeyError:
# Log an error if we can't get a commit
msg = "Unable to tag {}'s repo. Bad ref 'HEAD'. Is the repo empty?"
logger.error(msg.format(team_name))
else:
# Create a submission for the HEAD commit
TeamSubmission.objects.create(teamclient=instance,
commit=commit.id,
name="ShellAI",
submitter=None)
logger.info("Tagged {}'s repo".format(team_name))
|
855c7b56ff92efce90dc4953ebabc4aca07f5eb8 | domains/integrator_chains/fmrb_sci_examples/scripts/lqr.py | domains/integrator_chains/fmrb_sci_examples/scripts/lqr.py | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
| #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class StateFeedback(rospy.Subscriber):
def __init__(self, intopic, outtopic, K=None):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-np.dot(self.K, np.asarray(vs.point))]))
class LQRController(StateFeedback):
def __init__(self, intopic, outtopic,
A=None, B=None, Q=None, R=None):
if A is None and B is None:
A = np.array([[0., 1, 0],
[0, 0, 1],
[0, 0, 0]])
B = np.array([[0.], [0], [1]])
if Q is None and R is None:
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
StateFeedback.__init__(self, intopic, outtopic, K)
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
n = 1 # Number of output dimensions
m = 3 # Number of derivatives
A = np.diag(np.ones(m-1), k=1)
B = np.zeros((m, 1))
B[-1,0] = 1.0
Q = np.diag(np.ones(m))
R = np.diag([1.])
lqrc = LQRController("input", "output", A, B, Q, R)
rospy.spin()
| Improve LQR example for integrator_chains domain | Improve LQR example for integrator_chains domain
| Python | bsd-3-clause | fmrchallenge/fmrbenchmark,fmrchallenge/fmrbenchmark,fmrchallenge/fmrbenchmark | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
Improve LQR example for integrator_chains domain | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class StateFeedback(rospy.Subscriber):
def __init__(self, intopic, outtopic, K=None):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-np.dot(self.K, np.asarray(vs.point))]))
class LQRController(StateFeedback):
def __init__(self, intopic, outtopic,
A=None, B=None, Q=None, R=None):
if A is None and B is None:
A = np.array([[0., 1, 0],
[0, 0, 1],
[0, 0, 0]])
B = np.array([[0.], [0], [1]])
if Q is None and R is None:
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
StateFeedback.__init__(self, intopic, outtopic, K)
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
n = 1 # Number of output dimensions
m = 3 # Number of derivatives
A = np.diag(np.ones(m-1), k=1)
B = np.zeros((m, 1))
B[-1,0] = 1.0
Q = np.diag(np.ones(m))
R = np.diag([1.])
lqrc = LQRController("input", "output", A, B, Q, R)
rospy.spin()
| <commit_before>#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
<commit_msg>Improve LQR example for integrator_chains domain<commit_after> | #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class StateFeedback(rospy.Subscriber):
def __init__(self, intopic, outtopic, K=None):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-np.dot(self.K, np.asarray(vs.point))]))
class LQRController(StateFeedback):
def __init__(self, intopic, outtopic,
A=None, B=None, Q=None, R=None):
if A is None and B is None:
A = np.array([[0., 1, 0],
[0, 0, 1],
[0, 0, 0]])
B = np.array([[0.], [0], [1]])
if Q is None and R is None:
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
StateFeedback.__init__(self, intopic, outtopic, K)
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
n = 1 # Number of output dimensions
m = 3 # Number of derivatives
A = np.diag(np.ones(m-1), k=1)
B = np.zeros((m, 1))
B[-1,0] = 1.0
Q = np.diag(np.ones(m))
R = np.diag([1.])
lqrc = LQRController("input", "output", A, B, Q, R)
rospy.spin()
| #!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
Improve LQR example for integrator_chains domain#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class StateFeedback(rospy.Subscriber):
def __init__(self, intopic, outtopic, K=None):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-np.dot(self.K, np.asarray(vs.point))]))
class LQRController(StateFeedback):
def __init__(self, intopic, outtopic,
A=None, B=None, Q=None, R=None):
if A is None and B is None:
A = np.array([[0., 1, 0],
[0, 0, 1],
[0, 0, 0]])
B = np.array([[0.], [0], [1]])
if Q is None and R is None:
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
StateFeedback.__init__(self, intopic, outtopic, K)
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
n = 1 # Number of output dimensions
m = 3 # Number of derivatives
A = np.diag(np.ones(m-1), k=1)
B = np.zeros((m, 1))
B[-1,0] = 1.0
Q = np.diag(np.ones(m))
R = np.diag([1.])
lqrc = LQRController("input", "output", A, B, Q, R)
rospy.spin()
| <commit_before>#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
class LQRController(rospy.Subscriber):
def __init__(self, intopic, outtopic):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-(vs.point[0] + 2.4142*vs.point[1] + 2.4142*vs.point[2])]))
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
lqrc = LQRController("input", "output")
rospy.spin()
<commit_msg>Improve LQR example for integrator_chains domain<commit_after>#!/usr/bin/env python
from __future__ import print_function
import roslib; roslib.load_manifest('dynamaestro')
import rospy
from dynamaestro.msg import VectorStamped
from control import lqr
import numpy as np
class StateFeedback(rospy.Subscriber):
def __init__(self, intopic, outtopic, K=None):
rospy.Subscriber.__init__(self, outtopic, VectorStamped, self.read_state)
self.intopic = rospy.Publisher(intopic, VectorStamped, queue_size=1)
self.K = K
def read_state(self, vs):
self.intopic.publish(VectorStamped(point=[-np.dot(self.K, np.asarray(vs.point))]))
class LQRController(StateFeedback):
def __init__(self, intopic, outtopic,
A=None, B=None, Q=None, R=None):
if A is None and B is None:
A = np.array([[0., 1, 0],
[0, 0, 1],
[0, 0, 0]])
B = np.array([[0.], [0], [1]])
if Q is None and R is None:
Q = np.diag([1.,1,1])
R = np.diag([1.])
K, S, E = lqr(A,B,Q,R)
StateFeedback.__init__(self, intopic, outtopic, K)
if __name__ == "__main__":
rospy.init_node("lqr", anonymous=True)
n = 1 # Number of output dimensions
m = 3 # Number of derivatives
A = np.diag(np.ones(m-1), k=1)
B = np.zeros((m, 1))
B[-1,0] = 1.0
Q = np.diag(np.ones(m))
R = np.diag([1.])
lqrc = LQRController("input", "output", A, B, Q, R)
rospy.spin()
|
0453402da8ca1522fc08ce4d774a2664953348ee | threaded_messages/management.py | threaded_messages/management.py | from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
| from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_migrate.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
| Use post_migrate signal instead of post_syncdb | Use post_migrate signal instead of post_syncdb
| Python | mit | siovene/django-threaded-messages,siovene/django-threaded-messages,siovene/django-threaded-messages | from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
Use post_migrate signal instead of post_syncdb | from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_migrate.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
| <commit_before>from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
<commit_msg>Use post_migrate signal instead of post_syncdb<commit_after> | from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_migrate.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
| from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
Use post_migrate signal instead of post_syncdbfrom django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_migrate.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
| <commit_before>from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
<commit_msg>Use post_migrate signal instead of post_syncdb<commit_after>from django.conf import settings
from django.utils.translation import ugettext_noop as _
from django.db.models import signals
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("received_email", _("Private messages"), _("(this is highly recommended)"))
signals.post_migrate.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes (Threaded Messages) as notification app not found"
|
6a830973fa8f29278015d55819dcbd87f0472ac9 | post_office/test_urls.py | post_office/test_urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls), name='admin'),
)
| from django.conf.urls import include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls), name='admin'),
]
| Fix Django 1.10 url patterns warning | Fix Django 1.10 url patterns warning
| Python | mit | ui/django-post_office,JostCrow/django-post_office,RafRaf/django-post_office,ui/django-post_office,yprez/django-post_office,jrief/django-post_office | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls), name='admin'),
)
Fix Django 1.10 url patterns warning | from django.conf.urls import include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls), name='admin'),
]
| <commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls), name='admin'),
)
<commit_msg>Fix Django 1.10 url patterns warning<commit_after> | from django.conf.urls import include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls), name='admin'),
]
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls), name='admin'),
)
Fix Django 1.10 url patterns warningfrom django.conf.urls import include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls), name='admin'),
]
| <commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls), name='admin'),
)
<commit_msg>Fix Django 1.10 url patterns warning<commit_after>from django.conf.urls import include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls), name='admin'),
]
|
480c89d81e1610d698269c41f4543c38193bef13 | test/test_orthomcl_database.py | test/test_orthomcl_database.py | import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
dbname = orthomcl_database.create_database()
orthomcl_database.delete_database(dbname)
| import MySQLdb
import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
self.credentials = orthomcl_database._get_root_credentials()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
'''
Create a configuration file, and ensure the contents match assumptions.
'''
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
'''
Create a database, connect to it and perform a simple select query, verify the outcome and delete the database.
'''
try:
# Create database
dbname = orthomcl_database.create_database()
# Access database as restricted user
db_connection = MySQLdb.connect(host=self.credentials.host,
port=self.credentials.port,
user='orthomcl', passwd='pass')
db_connection.query('SELECT 1')
result = db_connection.store_result()
self.assertEqual(1L, result.fetch_row()[0][0])
db_connection.close()
finally:
if dbname:
# Delete database
orthomcl_database.delete_database(dbname)
| Expand test to include query on the created database as restricted user | Expand test to include query on the created database as restricted user | Python | mit | ODoSE/odose.nl | import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
dbname = orthomcl_database.create_database()
orthomcl_database.delete_database(dbname)
Expand test to include query on the created database as restricted user | import MySQLdb
import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
self.credentials = orthomcl_database._get_root_credentials()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
'''
Create a configuration file, and ensure the contents match assumptions.
'''
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
'''
Create a database, connect to it and perform a simple select query, verify the outcome and delete the database.
'''
try:
# Create database
dbname = orthomcl_database.create_database()
# Access database as restricted user
db_connection = MySQLdb.connect(host=self.credentials.host,
port=self.credentials.port,
user='orthomcl', passwd='pass')
db_connection.query('SELECT 1')
result = db_connection.store_result()
self.assertEqual(1L, result.fetch_row()[0][0])
db_connection.close()
finally:
if dbname:
# Delete database
orthomcl_database.delete_database(dbname)
| <commit_before>import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
dbname = orthomcl_database.create_database()
orthomcl_database.delete_database(dbname)
<commit_msg>Expand test to include query on the created database as restricted user<commit_after> | import MySQLdb
import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
self.credentials = orthomcl_database._get_root_credentials()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
'''
Create a configuration file, and ensure the contents match assumptions.
'''
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
'''
Create a database, connect to it and perform a simple select query, verify the outcome and delete the database.
'''
try:
# Create database
dbname = orthomcl_database.create_database()
# Access database as restricted user
db_connection = MySQLdb.connect(host=self.credentials.host,
port=self.credentials.port,
user='orthomcl', passwd='pass')
db_connection.query('SELECT 1')
result = db_connection.store_result()
self.assertEqual(1L, result.fetch_row()[0][0])
db_connection.close()
finally:
if dbname:
# Delete database
orthomcl_database.delete_database(dbname)
| import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
dbname = orthomcl_database.create_database()
orthomcl_database.delete_database(dbname)
Expand test to include query on the created database as restricted userimport MySQLdb
import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
self.credentials = orthomcl_database._get_root_credentials()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
'''
Create a configuration file, and ensure the contents match assumptions.
'''
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
'''
Create a database, connect to it and perform a simple select query, verify the outcome and delete the database.
'''
try:
# Create database
dbname = orthomcl_database.create_database()
# Access database as restricted user
db_connection = MySQLdb.connect(host=self.credentials.host,
port=self.credentials.port,
user='orthomcl', passwd='pass')
db_connection.query('SELECT 1')
result = db_connection.store_result()
self.assertEqual(1L, result.fetch_row()[0][0])
db_connection.close()
finally:
if dbname:
# Delete database
orthomcl_database.delete_database(dbname)
| <commit_before>import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
dbname = orthomcl_database.create_database()
orthomcl_database.delete_database(dbname)
<commit_msg>Expand test to include query on the created database as restricted user<commit_after>import MySQLdb
import shutil
import tempfile
import unittest
import orthomcl_database
class Test(unittest.TestCase):
def setUp(self):
self.run_dir = tempfile.mkdtemp()
self.credentials = orthomcl_database._get_root_credentials()
def tearDown(self):
shutil.rmtree(self.run_dir)
def test_get_configuration_file(self):
'''
Create a configuration file, and ensure the contents match assumptions.
'''
conffile = orthomcl_database.get_configuration_file(self.run_dir, 'test_dbname', 5)
with open(conffile) as reader:
content = reader.read()
self.assertIn('orthomcl', content)
self.assertIn('127.0.0.1', content)
self.assertIn('mysql', content)
self.assertIn('evalueExponentCutoff=5\n', content)
def test_create_database(self):
'''
Create a database, connect to it and perform a simple select query, verify the outcome and delete the database.
'''
try:
# Create database
dbname = orthomcl_database.create_database()
# Access database as restricted user
db_connection = MySQLdb.connect(host=self.credentials.host,
port=self.credentials.port,
user='orthomcl', passwd='pass')
db_connection.query('SELECT 1')
result = db_connection.store_result()
self.assertEqual(1L, result.fetch_row()[0][0])
db_connection.close()
finally:
if dbname:
# Delete database
orthomcl_database.delete_database(dbname)
|
082ac65c32c323c36036e0ddac140a87942e9b00 | tests/window/WINDOW_CAPTION.py | tests/window/WINDOW_CAPTION.py | #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| Make windows bigger in this test so the captions can be read. | Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40781
| Python | bsd-3-clause | infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore | #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40781 | #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
<commit_msg>Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40781<commit_after> | #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40781#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
<commit_msg>Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40781<commit_after>#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
4f4c3fabe1ccb91ca8f510a6ab81b6f2eb588c17 | openstack/tests/functional/telemetry/v2/test_statistics.py | openstack/tests/functional/telemetry/v2/test_statistics.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
found_something = False
for met in self.conn.telemetry.meters():
try:
stat = next(self.conn.telemetry.statistics(met))
self.assertIn('period_end', stat)
found_something = True
except Exception:
pass
self.assertTrue(found_something)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
for met in self.conn.telemetry.meters():
for stat in self.conn.telemetry.statistics(met):
self.assertTrue(stat.period_end_at is not None)
break
| Fix the telemetry statistics test | Fix the telemetry statistics test
This test worked fine on devstack, but failed on the test gate
because not all meters have statistics. Look for a meter with
statistics.
Partial-bug: #1665495
Change-Id: Ife0f1f11c70e926801b48000dd0b4e9d863a865f
| Python | apache-2.0 | briancurtin/python-openstacksdk,dtroyer/python-openstacksdk,dtroyer/python-openstacksdk,openstack/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,stackforge/python-openstacksdk | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
found_something = False
for met in self.conn.telemetry.meters():
try:
stat = next(self.conn.telemetry.statistics(met))
self.assertIn('period_end', stat)
found_something = True
except Exception:
pass
self.assertTrue(found_something)
Fix the telemetry statistics test
This test worked fine on devstack, but failed on the test gate
because not all meters have statistics. Look for a meter with
statistics.
Partial-bug: #1665495
Change-Id: Ife0f1f11c70e926801b48000dd0b4e9d863a865f | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
for met in self.conn.telemetry.meters():
for stat in self.conn.telemetry.statistics(met):
self.assertTrue(stat.period_end_at is not None)
break
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
found_something = False
for met in self.conn.telemetry.meters():
try:
stat = next(self.conn.telemetry.statistics(met))
self.assertIn('period_end', stat)
found_something = True
except Exception:
pass
self.assertTrue(found_something)
<commit_msg>Fix the telemetry statistics test
This test worked fine on devstack, but failed on the test gate
because not all meters have statistics. Look for a meter with
statistics.
Partial-bug: #1665495
Change-Id: Ife0f1f11c70e926801b48000dd0b4e9d863a865f<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
for met in self.conn.telemetry.meters():
for stat in self.conn.telemetry.statistics(met):
self.assertTrue(stat.period_end_at is not None)
break
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
found_something = False
for met in self.conn.telemetry.meters():
try:
stat = next(self.conn.telemetry.statistics(met))
self.assertIn('period_end', stat)
found_something = True
except Exception:
pass
self.assertTrue(found_something)
Fix the telemetry statistics test
This test worked fine on devstack, but failed on the test gate
because not all meters have statistics. Look for a meter with
statistics.
Partial-bug: #1665495
Change-Id: Ife0f1f11c70e926801b48000dd0b4e9d863a865f# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
for met in self.conn.telemetry.meters():
for stat in self.conn.telemetry.statistics(met):
self.assertTrue(stat.period_end_at is not None)
break
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
found_something = False
for met in self.conn.telemetry.meters():
try:
stat = next(self.conn.telemetry.statistics(met))
self.assertIn('period_end', stat)
found_something = True
except Exception:
pass
self.assertTrue(found_something)
<commit_msg>Fix the telemetry statistics test
This test worked fine on devstack, but failed on the test gate
because not all meters have statistics. Look for a meter with
statistics.
Partial-bug: #1665495
Change-Id: Ife0f1f11c70e926801b48000dd0b4e9d863a865f<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from openstack.tests.functional import base
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestStatistics(base.BaseFunctionalTest):
def test_list(self):
for met in self.conn.telemetry.meters():
for stat in self.conn.telemetry.statistics(met):
self.assertTrue(stat.period_end_at is not None)
break
|
0dabb6f4b18ff73f16088b207894d5e647494afb | colors.py | colors.py | from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import hexlify, unhexlify
import sys
def pokecolor(pos, string):
color = textwrap.wrap(string, 4)
tcp.pokemem(pos, struct.unpack(">I", color[0])[0])
tcp.pokemem(pos + 4, struct.unpack(">I", color[1])[0])
tcp.pokemem(pos + 8, struct.unpack(">I", color[2])[0])
tcp.pokemem(pos + 12, struct.unpack(">I", color[3])[0])
tcp = TCPGecko("192.168.137.3")
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3):
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0)
tcp.writestr(0x12D14F64, Colors) #Only overwrites currently loaded color
#You need to figure out timing to apply
tcp.s.close()
print("Done.")
| from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import unhexlify
import sys
tcp = TCPGecko("192.168.0.8") #Wii U IP address
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3): #Create the RGB floats
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0) #Alpha
tcp.writestr(0x12D18F64, Colors) #Only overwrites currently loaded color
#Run a command right after the lobby is "ready"
tcp.s.close()
print("Done!")
| Update ColorHax to 2.3.0 address | Update ColorHax to 2.3.0 address
Need to make pyGecko auto-find it all
| Python | mit | wiiudev/pyGecko,wiiudev/pyGecko | from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import hexlify, unhexlify
import sys
def pokecolor(pos, string):
color = textwrap.wrap(string, 4)
tcp.pokemem(pos, struct.unpack(">I", color[0])[0])
tcp.pokemem(pos + 4, struct.unpack(">I", color[1])[0])
tcp.pokemem(pos + 8, struct.unpack(">I", color[2])[0])
tcp.pokemem(pos + 12, struct.unpack(">I", color[3])[0])
tcp = TCPGecko("192.168.137.3")
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3):
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0)
tcp.writestr(0x12D14F64, Colors) #Only overwrites currently loaded color
#You need to figure out timing to apply
tcp.s.close()
print("Done.")
Update ColorHax to 2.3.0 address
Need to make pyGecko auto-find it all | from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import unhexlify
import sys
tcp = TCPGecko("192.168.0.8") #Wii U IP address
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3): #Create the RGB floats
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0) #Alpha
tcp.writestr(0x12D18F64, Colors) #Only overwrites currently loaded color
#Run a command right after the lobby is "ready"
tcp.s.close()
print("Done!")
| <commit_before>from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import hexlify, unhexlify
import sys
def pokecolor(pos, string):
color = textwrap.wrap(string, 4)
tcp.pokemem(pos, struct.unpack(">I", color[0])[0])
tcp.pokemem(pos + 4, struct.unpack(">I", color[1])[0])
tcp.pokemem(pos + 8, struct.unpack(">I", color[2])[0])
tcp.pokemem(pos + 12, struct.unpack(">I", color[3])[0])
tcp = TCPGecko("192.168.137.3")
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3):
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0)
tcp.writestr(0x12D14F64, Colors) #Only overwrites currently loaded color
#You need to figure out timing to apply
tcp.s.close()
print("Done.")
<commit_msg>Update ColorHax to 2.3.0 address
Need to make pyGecko auto-find it all<commit_after> | from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import unhexlify
import sys
tcp = TCPGecko("192.168.0.8") #Wii U IP address
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3): #Create the RGB floats
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0) #Alpha
tcp.writestr(0x12D18F64, Colors) #Only overwrites currently loaded color
#Run a command right after the lobby is "ready"
tcp.s.close()
print("Done!")
| from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import hexlify, unhexlify
import sys
def pokecolor(pos, string):
color = textwrap.wrap(string, 4)
tcp.pokemem(pos, struct.unpack(">I", color[0])[0])
tcp.pokemem(pos + 4, struct.unpack(">I", color[1])[0])
tcp.pokemem(pos + 8, struct.unpack(">I", color[2])[0])
tcp.pokemem(pos + 12, struct.unpack(">I", color[3])[0])
tcp = TCPGecko("192.168.137.3")
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3):
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0)
tcp.writestr(0x12D14F64, Colors) #Only overwrites currently loaded color
#You need to figure out timing to apply
tcp.s.close()
print("Done.")
Update ColorHax to 2.3.0 address
Need to make pyGecko auto-find it allfrom tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import unhexlify
import sys
tcp = TCPGecko("192.168.0.8") #Wii U IP address
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3): #Create the RGB floats
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0) #Alpha
tcp.writestr(0x12D18F64, Colors) #Only overwrites currently loaded color
#Run a command right after the lobby is "ready"
tcp.s.close()
print("Done!")
| <commit_before>from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import hexlify, unhexlify
import sys
def pokecolor(pos, string):
color = textwrap.wrap(string, 4)
tcp.pokemem(pos, struct.unpack(">I", color[0])[0])
tcp.pokemem(pos + 4, struct.unpack(">I", color[1])[0])
tcp.pokemem(pos + 8, struct.unpack(">I", color[2])[0])
tcp.pokemem(pos + 12, struct.unpack(">I", color[3])[0])
tcp = TCPGecko("192.168.137.3")
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3):
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0)
tcp.writestr(0x12D14F64, Colors) #Only overwrites currently loaded color
#You need to figure out timing to apply
tcp.s.close()
print("Done.")
<commit_msg>Update ColorHax to 2.3.0 address
Need to make pyGecko auto-find it all<commit_after>from tcpgecko import TCPGecko
from textwrap import wrap
from struct import pack
from binascii import unhexlify
import sys
tcp = TCPGecko("192.168.0.8") #Wii U IP address
Colors = b""
for i in range(1, 4): #Ignores Alpha since it doesn't use it
Color = wrap(sys.argv[i], 2) #Split it into 2 character chunks
for j in range(3): #Create the RGB floats
Colors += pack(">f", ord(unhexlify(Color[j])) / 256)
Colors += pack(">f", 1.0) #Alpha
tcp.writestr(0x12D18F64, Colors) #Only overwrites currently loaded color
#Run a command right after the lobby is "ready"
tcp.s.close()
print("Done!")
|
14b3ac31e7c46ce7c0482fd926a5306234a4f1e6 | taipan/_compat.py | taipan/_compat.py | """
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
| """
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY26 = sys.version[:2] == (2, 6)
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
| Fix the accidental removal of IS_PY26 | Fix the accidental removal of IS_PY26
| Python | bsd-2-clause | Xion/taipan | """
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
Fix the accidental removal of IS_PY26 | """
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY26 = sys.version[:2] == (2, 6)
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
| <commit_before>"""
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
<commit_msg>Fix the accidental removal of IS_PY26<commit_after> | """
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY26 = sys.version[:2] == (2, 6)
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
| """
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
Fix the accidental removal of IS_PY26"""
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY26 = sys.version[:2] == (2, 6)
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
| <commit_before>"""
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
<commit_msg>Fix the accidental removal of IS_PY26<commit_after>"""
Compatibility shims for different Python versions and platforms.
"""
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import sys
IS_PY26 = sys.version[:2] == (2, 6)
IS_PY3 = sys.version_info[0] == 3
import platform
IS_PYPY = platform.python_implementation() == 'PyPy'
unichr = chr if IS_PY3 else unichr
xrange = range if IS_PY3 else xrange
if IS_PY3:
ifilter = filter
imap = map
izip = zip
from itertools import (
zip_longest as izip_longest,
filterfalse as ifilterfalse,
)
else:
from itertools import ifilter, ifilterfalse, imap, izip, izip_longest
|
f52f2aafc204c3b19e04d05ac6fc1f10a4ea2463 | rcbi/rcbi/items.py | rcbi/rcbi/items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock, description, location, timestamp
variants = scrapy.Field()
| # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock_state, stock_text, description, location, timestamp
variants = scrapy.Field()
| Update comment to be more specific about stock fields | Update comment to be more specific about stock fields
| Python | apache-2.0 | rcbuild-info/scrape,rcbuild-info/scrape | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock, description, location, timestamp
variants = scrapy.Field()
Update comment to be more specific about stock fields | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock_state, stock_text, description, location, timestamp
variants = scrapy.Field()
| <commit_before># -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock, description, location, timestamp
variants = scrapy.Field()
<commit_msg>Update comment to be more specific about stock fields<commit_after> | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock_state, stock_text, description, location, timestamp
variants = scrapy.Field()
| # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock, description, location, timestamp
variants = scrapy.Field()
Update comment to be more specific about stock fields# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock_state, stock_text, description, location, timestamp
variants = scrapy.Field()
| <commit_before># -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock, description, location, timestamp
variants = scrapy.Field()
<commit_msg>Update comment to be more specific about stock fields<commit_after># -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class Part(scrapy.Item):
name = scrapy.Field()
site = scrapy.Field()
manufacturer = scrapy.Field()
sku = scrapy.Field()
weight = scrapy.Field()
# url, price, quantity, stock_state, stock_text, description, location, timestamp
variants = scrapy.Field()
|
288508e0693da5dbfc467a01ac18b31c4f8cc16c | nymms/tests/test_registry.py | nymms/tests/test_registry.py | import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def tearDown(self):
# Ensure we have a fresh registry after every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
| import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def setUp(self):
# Ensure we have a fresh registry before every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
print Command.registry
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
| Clear command registry BEFORE each test. | Clear command registry BEFORE each test.
| Python | bsd-2-clause | cloudtools/nymms | import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def tearDown(self):
# Ensure we have a fresh registry after every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
Clear command registry BEFORE each test. | import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def setUp(self):
# Ensure we have a fresh registry before every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
print Command.registry
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
| <commit_before>import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def tearDown(self):
# Ensure we have a fresh registry after every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
<commit_msg>Clear command registry BEFORE each test.<commit_after> | import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def setUp(self):
# Ensure we have a fresh registry before every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
print Command.registry
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
| import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def tearDown(self):
# Ensure we have a fresh registry after every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
Clear command registry BEFORE each test.import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def setUp(self):
# Ensure we have a fresh registry before every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
print Command.registry
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
| <commit_before>import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def tearDown(self):
# Ensure we have a fresh registry after every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
<commit_msg>Clear command registry BEFORE each test.<commit_after>import unittest
from nymms import registry
from nymms.resources import Command, MonitoringGroup
from weakref import WeakValueDictionary
class TestRegistry(unittest.TestCase):
def setUp(self):
# Ensure we have a fresh registry before every test
Command.registry.clear()
def test_empty_registry(self):
self.assertEqual(Command.registry, WeakValueDictionary())
def test_register_object(self):
# First test it's empty
self.assertEqual(Command.registry, WeakValueDictionary())
# Add a command
command = Command('test_command', '/bin/true')
# verify that there is only a single command in the registry
self.assertEqual(len(Command.registry), 1)
# Verify that the registered command is the same as command
self.assertIs(Command.registry[command.name], command)
def test_duplicate_register(self):
# add a command
print Command.registry
Command('test_command', '/bin/true')
with self.assertRaises(registry.DuplicateEntryError):
Command('test_command', '/bin/true')
def test_invalid_resource_register(self):
with self.assertRaises(TypeError):
Command.registry['test'] = MonitoringGroup('test_group')
|
8715e112a8299bc6db831c6b2df08901107a550a | analysis/opensimulator-stats-analyzer/src/ostagraph.py | analysis/opensimulator-stats-analyzer/src/ostagraph.py | #!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
plt.show()
else:
print "No such stat as %s" % (opts.select) | #!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select) | Add --out option to generate graph at specific location instead of interactive display | Add --out option to generate graph at specific location instead of interactive display
| Python | bsd-3-clause | justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools | #!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
plt.show()
else:
print "No such stat as %s" % (opts.select) Add --out option to generate graph at specific location instead of interactive display | #!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select) | <commit_before>#!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
plt.show()
else:
print "No such stat as %s" % (opts.select) <commit_msg>Add --out option to generate graph at specific location instead of interactive display<commit_after> | #!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select) | #!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
plt.show()
else:
print "No such stat as %s" % (opts.select) Add --out option to generate graph at specific location instead of interactive display#!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select) | <commit_before>#!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
plt.show()
else:
print "No such stat as %s" % (opts.select) <commit_msg>Add --out option to generate graph at specific location instead of interactive display<commit_after>#!/usr/bin/python
import argparse
import matplotlib.pyplot as plt
from pylab import *
from osta.osta import *
############
### MAIN ###
############
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument(
'--select',
help = "Select the full name of a stat to graph (e.g. \"scene.Keynote 1.RootAgents\")")
parser.add_argument(
'--out',
help = "Path to output the graph rather the interactively display. Filename extension determines graphics type (e.g. \"graph.jpg\")",
)
parser.add_argument(
'statsLogPath',
help = "Path to the stats log file.",
metavar = "stats-log-path")
opts = parser.parse_args()
data = Osta.parse(opts.statsLogPath)
# TODO: We will move this kind of check inside Osta shortly
(category, container, name) = opts.select.split(".")
if (category in data and container in data[category] and name in data[category][container]):
plt.plot(data[category][container][name]['abs']['values'])
plt.ylabel(opts.select)
if 'out' in opts:
savefig(opts.out)
else:
plt.show()
else:
print "No such stat as %s" % (opts.select) |
c1dc571faa9bf2ae0e0a580365943806826ced4a | src/adhocracy_spd/adhocracy_spd/workflows/digital_leben.py | src/adhocracy_spd/adhocracy_spd/workflows/digital_leben.py | """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| Make flake8 happy for spd | Make flake8 happy for spd
| Python | agpl-3.0 | liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator | """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
Make flake8 happy for spd | """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| <commit_before>"""Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
<commit_msg>Make flake8 happy for spd<commit_after> | """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| """Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
Make flake8 happy for spd"""Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
| <commit_before>"""Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': [ 'participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None ], # noqa
['create_rate', 'Allow', None, None, None ], # noqa
['edit_rate', None, None, 'Allow', None ], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
<commit_msg>Make flake8 happy for spd<commit_after>"""Digital leben workflow."""
from adhocracy_core.workflows import add_workflow
from adhocracy_core.workflows.standard import standard_meta
digital_leben_meta = standard_meta \
.transform(('states', 'participate', 'acm'),
{'principals': ['participant', 'moderator', 'creator', 'initiator'], # noqa
'permissions':
[['create_proposal', None, None, None, 'Allow'], # noqa
['edit_proposal', None, None, 'Allow', 'Allow'], # noqa
['create_comment', 'Allow', 'Allow', None, 'Allow'], # noqa
['edit_comment', None, None, 'Allow', None], # noqa
['create_rate', 'Allow', None, None, None], # noqa
['edit_rate', None, None, 'Allow', None], # noqa
]})
def includeme(config):
"""Add workflow."""
add_workflow(config.registry, digital_leben_meta, 'digital_leben')
|
9ad85a6986c8350cb082f23d9508301c40ca440d | resolwe_bio/api/views.py | resolwe_bio/api/views.py | from django.db.models import Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
| from django.db.models import Max, Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
| Order samples by date created of the newest Data | Order samples by date created of the newest Data
| Python | apache-2.0 | genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio | from django.db.models import Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
Order samples by date created of the newest Data | from django.db.models import Max, Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
| <commit_before>from django.db.models import Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
<commit_msg>Order samples by date created of the newest Data<commit_after> | from django.db.models import Max, Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
| from django.db.models import Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
Order samples by date created of the newest Datafrom django.db.models import Max, Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
| <commit_before>from django.db.models import Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator'))
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
<commit_msg>Order samples by date created of the newest Data<commit_after>from django.db.models import Max, Q
from rest_framework.decorators import list_route
from rest_framework.response import Response
from resolwe.flow.models import Collection
from resolwe.flow.views import CollectionViewSet
class SampleViewSet(CollectionViewSet):
queryset = Collection.objects.filter(descriptor_schema__slug='sample').prefetch_related('descriptor_schema')
@list_route(methods=[u'get'])
def annotated(self, request):
"""Return list of annotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
Q(descriptor__has_key='geo') & Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
@list_route(methods=[u'get'])
def unannotated(self, request):
"""Return list of unannotated `Samples`."""
queryset = self.get_queryset().annotate(
latest_date=Max('data__created')
).filter(
~Q(descriptor__has_key='geo') | ~Q(descriptor__geo__has_key='annotator')
).order_by('-latest_date')
serializer = self.serializer_class(queryset, many=True, context={'request': request})
return Response(serializer.data)
|
08995bcb577276af1d5b2b8ed8eb68d2678ddc4d | game/tests.py | game/tests.py | import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse | import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
def create_user(question_text, days):
pass
class UserViewTests(TestCase):
def test_users_view_exists(self):
response = self.client.get(reverse('game:users'))
self.assertEqual(response.status_code, 200)
def test_users_view_with_no_users(self):
pass
def test_users_view_with_one_user(self):
pass
def test_users_view_with_two_users(self):
pass
def test_users_view_with_multiple_users(self):
pass
class RegistrationTests(TestCase):
pass | Set up test.py file and add skeleton for UserViewTests and RegistrationTests. Also add test to make sure user view exists | Set up test.py file and add skeleton for UserViewTests and RegistrationTests. Also add test to make sure user view exists
| Python | mit | shintouki/augmented-pandemic,shintouki/augmented-pandemic,shintouki/augmented-pandemic | import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverseSet up test.py file and add skeleton for UserViewTests and RegistrationTests. Also add test to make sure user view exists | import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
def create_user(question_text, days):
pass
class UserViewTests(TestCase):
def test_users_view_exists(self):
response = self.client.get(reverse('game:users'))
self.assertEqual(response.status_code, 200)
def test_users_view_with_no_users(self):
pass
def test_users_view_with_one_user(self):
pass
def test_users_view_with_two_users(self):
pass
def test_users_view_with_multiple_users(self):
pass
class RegistrationTests(TestCase):
pass | <commit_before>import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse<commit_msg>Set up test.py file and add skeleton for UserViewTests and RegistrationTests. Also add test to make sure user view exists<commit_after> | import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
def create_user(question_text, days):
pass
class UserViewTests(TestCase):
def test_users_view_exists(self):
response = self.client.get(reverse('game:users'))
self.assertEqual(response.status_code, 200)
def test_users_view_with_no_users(self):
pass
def test_users_view_with_one_user(self):
pass
def test_users_view_with_two_users(self):
pass
def test_users_view_with_multiple_users(self):
pass
class RegistrationTests(TestCase):
pass | import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverseSet up test.py file and add skeleton for UserViewTests and RegistrationTests. Also add test to make sure user view existsimport datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
def create_user(question_text, days):
pass
class UserViewTests(TestCase):
def test_users_view_exists(self):
response = self.client.get(reverse('game:users'))
self.assertEqual(response.status_code, 200)
def test_users_view_with_no_users(self):
pass
def test_users_view_with_one_user(self):
pass
def test_users_view_with_two_users(self):
pass
def test_users_view_with_multiple_users(self):
pass
class RegistrationTests(TestCase):
pass | <commit_before>import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse<commit_msg>Set up test.py file and add skeleton for UserViewTests and RegistrationTests. Also add test to make sure user view exists<commit_after>import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
def create_user(question_text, days):
pass
class UserViewTests(TestCase):
def test_users_view_exists(self):
response = self.client.get(reverse('game:users'))
self.assertEqual(response.status_code, 200)
def test_users_view_with_no_users(self):
pass
def test_users_view_with_one_user(self):
pass
def test_users_view_with_two_users(self):
pass
def test_users_view_with_multiple_users(self):
pass
class RegistrationTests(TestCase):
pass |
617cda3f3d7732b28b127fc45cb7ad4344f0c77f | votes/views.py | votes/views.py | import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
user_details = json.loads(request.user.profile.details)
if not user_details:
return HttpResponse("User details invalid. Try logging in again.")
if not filter:
return HttpResponse("No filter given. Admin, fix it.")
print(user_details)
if not filter.matches(user_details):
print("User doesn't match filter.")
return HttpResponse("You are not eligible")
options = vote.option_set.all()
ctx = {}
ctx['vote'] = vote
ctx['options'] = options
return render_to_response("vote/vote_vote.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
| import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
from users.models import UserProfile
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
error = False
ctx = {}
ctx['vote'] = vote
options = vote.option_set.all()
ctx['options'] = options
try:
user_details = json.loads(request.user.profile.details)
if not filter:
ctx['alert_head'] = "No filter given."
ctx['alert_text'] = "This vote has not been configured properly."
error = True
elif not filter.matches(user_details):
ctx['alert_head'] = "Not eligible"
ctx['alert_text'] = "You are not eligible for this vote. Tough luck."
error = True
except UserProfile.DoesNotExist:
ctx['alert_head'] = "User details invalid."
ctx['alert_text'] = "Your user details could not be retrieved from CampusNet. Please log out and try again later."
error = True
if not error:
return render_to_response("vote/vote_vote.html", context=ctx)
else:
return render_to_response("vote/vote_error.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
| Add error handling to VoteView | Add error handling to VoteView
| Python | mit | OpenJUB/jay,kuboschek/jay,OpenJUB/jay,OpenJUB/jay,kuboschek/jay,kuboschek/jay | import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
user_details = json.loads(request.user.profile.details)
if not user_details:
return HttpResponse("User details invalid. Try logging in again.")
if not filter:
return HttpResponse("No filter given. Admin, fix it.")
print(user_details)
if not filter.matches(user_details):
print("User doesn't match filter.")
return HttpResponse("You are not eligible")
options = vote.option_set.all()
ctx = {}
ctx['vote'] = vote
ctx['options'] = options
return render_to_response("vote/vote_vote.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
Add error handling to VoteView | import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
from users.models import UserProfile
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
error = False
ctx = {}
ctx['vote'] = vote
options = vote.option_set.all()
ctx['options'] = options
try:
user_details = json.loads(request.user.profile.details)
if not filter:
ctx['alert_head'] = "No filter given."
ctx['alert_text'] = "This vote has not been configured properly."
error = True
elif not filter.matches(user_details):
ctx['alert_head'] = "Not eligible"
ctx['alert_text'] = "You are not eligible for this vote. Tough luck."
error = True
except UserProfile.DoesNotExist:
ctx['alert_head'] = "User details invalid."
ctx['alert_text'] = "Your user details could not be retrieved from CampusNet. Please log out and try again later."
error = True
if not error:
return render_to_response("vote/vote_vote.html", context=ctx)
else:
return render_to_response("vote/vote_error.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
| <commit_before>import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
user_details = json.loads(request.user.profile.details)
if not user_details:
return HttpResponse("User details invalid. Try logging in again.")
if not filter:
return HttpResponse("No filter given. Admin, fix it.")
print(user_details)
if not filter.matches(user_details):
print("User doesn't match filter.")
return HttpResponse("You are not eligible")
options = vote.option_set.all()
ctx = {}
ctx['vote'] = vote
ctx['options'] = options
return render_to_response("vote/vote_vote.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
<commit_msg>Add error handling to VoteView<commit_after> | import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
from users.models import UserProfile
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
error = False
ctx = {}
ctx['vote'] = vote
options = vote.option_set.all()
ctx['options'] = options
try:
user_details = json.loads(request.user.profile.details)
if not filter:
ctx['alert_head'] = "No filter given."
ctx['alert_text'] = "This vote has not been configured properly."
error = True
elif not filter.matches(user_details):
ctx['alert_head'] = "Not eligible"
ctx['alert_text'] = "You are not eligible for this vote. Tough luck."
error = True
except UserProfile.DoesNotExist:
ctx['alert_head'] = "User details invalid."
ctx['alert_text'] = "Your user details could not be retrieved from CampusNet. Please log out and try again later."
error = True
if not error:
return render_to_response("vote/vote_vote.html", context=ctx)
else:
return render_to_response("vote/vote_error.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
| import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
user_details = json.loads(request.user.profile.details)
if not user_details:
return HttpResponse("User details invalid. Try logging in again.")
if not filter:
return HttpResponse("No filter given. Admin, fix it.")
print(user_details)
if not filter.matches(user_details):
print("User doesn't match filter.")
return HttpResponse("You are not eligible")
options = vote.option_set.all()
ctx = {}
ctx['vote'] = vote
ctx['options'] = options
return render_to_response("vote/vote_vote.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
Add error handling to VoteViewimport json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
from users.models import UserProfile
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
error = False
ctx = {}
ctx['vote'] = vote
options = vote.option_set.all()
ctx['options'] = options
try:
user_details = json.loads(request.user.profile.details)
if not filter:
ctx['alert_head'] = "No filter given."
ctx['alert_text'] = "This vote has not been configured properly."
error = True
elif not filter.matches(user_details):
ctx['alert_head'] = "Not eligible"
ctx['alert_text'] = "You are not eligible for this vote. Tough luck."
error = True
except UserProfile.DoesNotExist:
ctx['alert_head'] = "User details invalid."
ctx['alert_text'] = "Your user details could not be retrieved from CampusNet. Please log out and try again later."
error = True
if not error:
return render_to_response("vote/vote_vote.html", context=ctx)
else:
return render_to_response("vote/vote_error.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
| <commit_before>import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
user_details = json.loads(request.user.profile.details)
if not user_details:
return HttpResponse("User details invalid. Try logging in again.")
if not filter:
return HttpResponse("No filter given. Admin, fix it.")
print(user_details)
if not filter.matches(user_details):
print("User doesn't match filter.")
return HttpResponse("You are not eligible")
options = vote.option_set.all()
ctx = {}
ctx['vote'] = vote
ctx['options'] = options
return render_to_response("vote/vote_vote.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
<commit_msg>Add error handling to VoteView<commit_after>import json
from django.shortcuts import render, get_object_or_404, render_to_response
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from votes.models import Vote
from filters.models import UserFilter
from users.models import UserProfile
# TODO Check eligibility on GET already
class VoteView(View):
@method_decorator(login_required)
def get(self, request, system_name, vote_name):
vote = get_object_or_404(Vote, machine_name=vote_name)
filter = vote.filter
error = False
ctx = {}
ctx['vote'] = vote
options = vote.option_set.all()
ctx['options'] = options
try:
user_details = json.loads(request.user.profile.details)
if not filter:
ctx['alert_head'] = "No filter given."
ctx['alert_text'] = "This vote has not been configured properly."
error = True
elif not filter.matches(user_details):
ctx['alert_head'] = "Not eligible"
ctx['alert_text'] = "You are not eligible for this vote. Tough luck."
error = True
except UserProfile.DoesNotExist:
ctx['alert_head'] = "User details invalid."
ctx['alert_text'] = "Your user details could not be retrieved from CampusNet. Please log out and try again later."
error = True
if not error:
return render_to_response("vote/vote_vote.html", context=ctx)
else:
return render_to_response("vote/vote_error.html", context=ctx)
def post(self, request):
return HttpResponse("TODO: Write vote processing code")
|
336769417acfdc7d61394008952dc124cc889b3c | changes/api/serializer/models/jobstep.py | changes/api/serializer/models/jobstep.py | from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
| from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'data': dict(instance.data),
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
| Add data to JobStep serializer | Add data to JobStep serializer
| Python | apache-2.0 | dropbox/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes | from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
Add data to JobStep serializer | from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'data': dict(instance.data),
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
| <commit_before>from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
<commit_msg>Add data to JobStep serializer<commit_after> | from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'data': dict(instance.data),
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
| from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
Add data to JobStep serializerfrom changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'data': dict(instance.data),
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
| <commit_before>from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
<commit_msg>Add data to JobStep serializer<commit_after>from changes.api.serializer import Serializer, register
from changes.models import JobStep
@register(JobStep)
class JobStepSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'phase': {
'id': instance.phase_id.hex,
},
'data': dict(instance.data),
'result': instance.result,
'status': instance.status,
'node': instance.node,
'duration': instance.duration,
'dateCreated': instance.date_created,
'dateStarted': instance.date_started,
'dateFinished': instance.date_finished,
}
|
32e70ee06be67cb9058b2da7dc1a714272c6a07a | pyQuantuccia/setup.py | pyQuantuccia/setup.py | import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
| import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
test_suite='tests',
ext_modules=[qu_ext]
)
| Add the location of tests. | Add the location of tests.
| Python | bsd-3-clause | jwg4/pyQuantuccia,jwg4/pyQuantuccia | import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
Add the location of tests. | import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
test_suite='tests',
ext_modules=[qu_ext]
)
| <commit_before>import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
<commit_msg>Add the location of tests.<commit_after> | import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
test_suite='tests',
ext_modules=[qu_ext]
)
| import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
Add the location of tests.import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
test_suite='tests',
ext_modules=[qu_ext]
)
| <commit_before>import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
<commit_msg>Add the location of tests.<commit_after>import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
include_dirs=['src/Quantuccia'],
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
author='Jack Grahl',
author_email='jack.grahl@gmail.com',
version='0.1.0',
packages=['pyQuantuccia'],
test_suite='tests',
ext_modules=[qu_ext]
)
|
6d97b723915e5de7a008e5d7bdd44e7883967fdc | retdec/tools/__init__.py | retdec/tools/__init__.py | #
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
| #
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
| Simplify the help message for the -k/--api-key parameter. | Simplify the help message for the -k/--api-key parameter.
We can use the '%(default)s' placeholder instead of string formatting.
| Python | mit | s3rvac/retdec-python | #
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
Simplify the help message for the -k/--api-key parameter.
We can use the '%(default)s' placeholder instead of string formatting. | #
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
| <commit_before>#
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
<commit_msg>Simplify the help message for the -k/--api-key parameter.
We can use the '%(default)s' placeholder instead of string formatting.<commit_after> | #
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
| #
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
Simplify the help message for the -k/--api-key parameter.
We can use the '%(default)s' placeholder instead of string formatting.#
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
| <commit_before>#
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
<commit_msg>Simplify the help message for the -k/--api-key parameter.
We can use the '%(default)s' placeholder instead of string formatting.<commit_after>#
# Project: retdec-python
# Copyright: (c) 2015-2016 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: MIT, see the LICENSE file for more details
#
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
|
7d9d6893a9fc01ccb74c27be4749ab512a3893a0 | tests/settings.py | tests/settings.py | import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
| import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
| Use in memory database for tests | Use in memory database for tests
| Python | mit | saulshanabrook/django-simpleimages | import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
Use in memory database for tests | import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
| <commit_before>import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
<commit_msg>Use in memory database for tests<commit_after> | import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
| import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
Use in memory database for testsimport os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
| <commit_before>import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
<commit_msg>Use in memory database for tests<commit_after>import os
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('simpleimages', 'tests')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
|
bc8ecce3699b22ca0fd80c67172a39d6afded567 | scripts/generic-script.py | scripts/generic-script.py | #!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
| #!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(0, maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
| Fix bug in generic script when running from checkout | Fix bug in generic script when running from checkout
| Python | apache-2.0 | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint | #!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
Fix bug in generic script when running from checkout | #!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(0, maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
| <commit_before>#!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
<commit_msg>Fix bug in generic script when running from checkout<commit_after> | #!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(0, maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
| #!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
Fix bug in generic script when running from checkout#!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(0, maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
| <commit_before>#!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
<commit_msg>Fix bug in generic script when running from checkout<commit_after>#!/usr/bin/python
import os
import sys
maybeRoot = os.path.dirname(os.path.dirname(__file__))
if os.path.exists(os.path.join(maybeRoot, 'mint', 'lib')):
sys.path.insert(0, maybeRoot)
from mint.scripts import some_module
sys.exit(some_module.Script().run())
|
c1893024ebd04a8eee14e2197791d6bab1985f2b | sheldon/storage.py | sheldon/storage.py | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
| # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
| Delete 'return' to refactor code | Delete 'return' to refactor code
| Python | mit | lises/sheldon | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
Delete 'return' to refactor code | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
| <commit_before># -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
<commit_msg>Delete 'return' to refactor code<commit_after> | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
| # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
Delete 'return' to refactor code# -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
| <commit_before># -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
<commit_msg>Delete 'return' to refactor code<commit_after># -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
|
8b5f3576ee30c1f50b3d3c5bd477b85ba4ec760e | plinth/modules/sso/__init__.py | plinth/modules/sso/__init__.py | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
| #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = [
'libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite',
'ttf-bitstream-vera'
]
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
| Make ttf-bitstream-vera a managed package | captcha: Make ttf-bitstream-vera a managed package
Signed-off-by: Joseph Nuthalpati <f3045f1a422c023fc6364ff0e3a18c260c53336e@thoughtworks.com>
Reviewed-by: James Valleroy <46e3063862880873c8617774e45d63de6172aab0@mailbox.org>
| Python | agpl-3.0 | harry-7/Plinth,harry-7/Plinth,harry-7/Plinth,harry-7/Plinth,harry-7/Plinth | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
captcha: Make ttf-bitstream-vera a managed package
Signed-off-by: Joseph Nuthalpati <f3045f1a422c023fc6364ff0e3a18c260c53336e@thoughtworks.com>
Reviewed-by: James Valleroy <46e3063862880873c8617774e45d63de6172aab0@mailbox.org> | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = [
'libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite',
'ttf-bitstream-vera'
]
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
| <commit_before>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
<commit_msg>captcha: Make ttf-bitstream-vera a managed package
Signed-off-by: Joseph Nuthalpati <f3045f1a422c023fc6364ff0e3a18c260c53336e@thoughtworks.com>
Reviewed-by: James Valleroy <46e3063862880873c8617774e45d63de6172aab0@mailbox.org><commit_after> | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = [
'libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite',
'ttf-bitstream-vera'
]
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
| #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
captcha: Make ttf-bitstream-vera a managed package
Signed-off-by: Joseph Nuthalpati <f3045f1a422c023fc6364ff0e3a18c260c53336e@thoughtworks.com>
Reviewed-by: James Valleroy <46e3063862880873c8617774e45d63de6172aab0@mailbox.org>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = [
'libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite',
'ttf-bitstream-vera'
]
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
| <commit_before>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
<commit_msg>captcha: Make ttf-bitstream-vera a managed package
Signed-off-by: Joseph Nuthalpati <f3045f1a422c023fc6364ff0e3a18c260c53336e@thoughtworks.com>
Reviewed-by: James Valleroy <46e3063862880873c8617774e45d63de6172aab0@mailbox.org><commit_after>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = [
'libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl', 'flite',
'ttf-bitstream-vera'
]
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
|
3f065d3e6b54912b2d78a70b5fda98d0476c3f09 | tlsep/__main__.py | tlsep/__main__.py | # -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads
from tlsep import _dane
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
def _main(reactor):
return threads.deferToThread(_dane.lookup_tlsa_records, *sys.argv[1:])
task.react(_main)
| # -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads, defer
from tlsep import _dane, _tls
def printResult(res):
tlsaRecord, serverCertificate = res
print tlsaRecord.matchesCertificate(serverCertificate)
print tlsaRecord
def _main(reactor, parent_domain, port, proto):
d = defer.gatherResults([
threads.deferToThread(_dane.lookup_tlsa_records, parent_domain, port, proto),
_tls.retrieveCertificate(parent_domain, port)
])
d.addCallback(printResult)
return d
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
task.react(_main, sys.argv[1:])
| Check the server cert using matchesCertificate | Check the server cert using matchesCertificate
| Python | mit | hynek/tnw | # -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads
from tlsep import _dane
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
def _main(reactor):
return threads.deferToThread(_dane.lookup_tlsa_records, *sys.argv[1:])
task.react(_main)
Check the server cert using matchesCertificate | # -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads, defer
from tlsep import _dane, _tls
def printResult(res):
tlsaRecord, serverCertificate = res
print tlsaRecord.matchesCertificate(serverCertificate)
print tlsaRecord
def _main(reactor, parent_domain, port, proto):
d = defer.gatherResults([
threads.deferToThread(_dane.lookup_tlsa_records, parent_domain, port, proto),
_tls.retrieveCertificate(parent_domain, port)
])
d.addCallback(printResult)
return d
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
task.react(_main, sys.argv[1:])
| <commit_before># -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads
from tlsep import _dane
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
def _main(reactor):
return threads.deferToThread(_dane.lookup_tlsa_records, *sys.argv[1:])
task.react(_main)
<commit_msg>Check the server cert using matchesCertificate<commit_after> | # -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads, defer
from tlsep import _dane, _tls
def printResult(res):
tlsaRecord, serverCertificate = res
print tlsaRecord.matchesCertificate(serverCertificate)
print tlsaRecord
def _main(reactor, parent_domain, port, proto):
d = defer.gatherResults([
threads.deferToThread(_dane.lookup_tlsa_records, parent_domain, port, proto),
_tls.retrieveCertificate(parent_domain, port)
])
d.addCallback(printResult)
return d
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
task.react(_main, sys.argv[1:])
| # -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads
from tlsep import _dane
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
def _main(reactor):
return threads.deferToThread(_dane.lookup_tlsa_records, *sys.argv[1:])
task.react(_main)
Check the server cert using matchesCertificate# -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads, defer
from tlsep import _dane, _tls
def printResult(res):
tlsaRecord, serverCertificate = res
print tlsaRecord.matchesCertificate(serverCertificate)
print tlsaRecord
def _main(reactor, parent_domain, port, proto):
d = defer.gatherResults([
threads.deferToThread(_dane.lookup_tlsa_records, parent_domain, port, proto),
_tls.retrieveCertificate(parent_domain, port)
])
d.addCallback(printResult)
return d
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
task.react(_main, sys.argv[1:])
| <commit_before># -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads
from tlsep import _dane
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
def _main(reactor):
return threads.deferToThread(_dane.lookup_tlsa_records, *sys.argv[1:])
task.react(_main)
<commit_msg>Check the server cert using matchesCertificate<commit_after># -*- test-case-name: tlsep.test.test_scripts -*-
# Copyright (c) Hynek Schlawack, Richard Wall
# See LICENSE for details.
"""
eg tlsep full.cert.getdnsapi.net 443 tcp
"""
import sys
from twisted.internet import task, threads, defer
from tlsep import _dane, _tls
def printResult(res):
tlsaRecord, serverCertificate = res
print tlsaRecord.matchesCertificate(serverCertificate)
print tlsaRecord
def _main(reactor, parent_domain, port, proto):
d = defer.gatherResults([
threads.deferToThread(_dane.lookup_tlsa_records, parent_domain, port, proto),
_tls.retrieveCertificate(parent_domain, port)
])
d.addCallback(printResult)
return d
def main():
if len(sys.argv) != 4:
print "Usage: {0} parent_domain port protocol".format(sys.argv[0])
sys.exit(1)
task.react(_main, sys.argv[1:])
|
7ace27a6a114e381a30ac9760880b68277a868fc | python_scripts/mc_config.py | python_scripts/mc_config.py | #!/usr/bin/python
import yaml
def read_config():
yml_file = open('/home/dlarochelle/git_dev/mediacloud/mediawords.yml', 'rb')
config_file = yaml.load( yml_file )
return config_file
| #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
| Use relative path location for mediawords.yml. | Use relative path location for mediawords.yml.
| Python | agpl-3.0 | berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud | #!/usr/bin/python
import yaml
def read_config():
yml_file = open('/home/dlarochelle/git_dev/mediacloud/mediawords.yml', 'rb')
config_file = yaml.load( yml_file )
return config_file
Use relative path location for mediawords.yml. | #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
| <commit_before>#!/usr/bin/python
import yaml
def read_config():
yml_file = open('/home/dlarochelle/git_dev/mediacloud/mediawords.yml', 'rb')
config_file = yaml.load( yml_file )
return config_file
<commit_msg>Use relative path location for mediawords.yml.<commit_after> | #!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
| #!/usr/bin/python
import yaml
def read_config():
yml_file = open('/home/dlarochelle/git_dev/mediacloud/mediawords.yml', 'rb')
config_file = yaml.load( yml_file )
return config_file
Use relative path location for mediawords.yml.#!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
| <commit_before>#!/usr/bin/python
import yaml
def read_config():
yml_file = open('/home/dlarochelle/git_dev/mediacloud/mediawords.yml', 'rb')
config_file = yaml.load( yml_file )
return config_file
<commit_msg>Use relative path location for mediawords.yml.<commit_after>#!/usr/bin/python
import yaml
import os.path
_config_file_base_name = 'mediawords.yml'
_config_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'mediawords.yml'))
def read_config():
yml_file = open(_config_file_name, 'rb')
config_file = yaml.load( yml_file )
return config_file
|
0652ab317db79ad7859aafba505c016cd6d58614 | modules/combined/tests/catch_release/catch_release_test.py | modules/combined/tests/catch_release/catch_release_test.py | from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e']}
| from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e'],
SKIP : 'temp'
}
| Test passes on my machine, dies on others. Skipping for now. | Test passes on my machine, dies on others. Skipping for now.
r8830
| Python | lgpl-2.1 | WilkAndy/moose,jinmm1992/moose,zzyfisherman/moose,tonkmr/moose,yipenggao/moose,tonkmr/moose,raghavaggarwal/moose,lindsayad/moose,SudiptaBiswas/moose,zzyfisherman/moose,YaqiWang/moose,permcody/moose,raghavaggarwal/moose,laagesen/moose,nuclear-wizard/moose,jasondhales/moose,idaholab/moose,harterj/moose,raghavaggarwal/moose,jessecarterMOOSE/moose,shanestafford/moose,permcody/moose,joshua-cogliati-inl/moose,katyhuff/moose,permcody/moose,backmari/moose,raghavaggarwal/moose,stimpsonsg/moose,wgapl/moose,kasra83/moose,Chuban/moose,jasondhales/moose,wgapl/moose,joshua-cogliati-inl/moose,adamLange/moose,kasra83/moose,lindsayad/moose,friedmud/moose,cpritam/moose,sapitts/moose,cpritam/moose,lindsayad/moose,jbair34/moose,SudiptaBiswas/moose,roystgnr/moose,jiangwen84/moose,cpritam/moose,YaqiWang/moose,capitalaslash/moose,laagesen/moose,jbair34/moose,nuclear-wizard/moose,dschwen/moose,friedmud/moose,backmari/moose,andrsd/moose,zzyfisherman/moose,backmari/moose,shanestafford/moose,shanestafford/moose,friedmud/moose,nuclear-wizard/moose,roystgnr/moose,jbair34/moose,laagesen/moose,stimpsonsg/moose,sapitts/moose,kasra83/moose,yipenggao/moose,roystgnr/moose,tonkmr/moose,bwspenc/moose,roystgnr/moose,jinmm1992/moose,Chuban/moose,giopastor/moose,jessecarterMOOSE/moose,zzyfisherman/moose,sapitts/moose,milljm/moose,roystgnr/moose,shanestafford/moose,sapitts/moose,mellis13/moose,giopastor/moose,zzyfisherman/moose,giopastor/moose,liuwenf/moose,andrsd/moose,mellis13/moose,cpritam/moose,adamLange/moose,tonkmr/moose,jiangwen84/moose,dschwen/moose,jessecarterMOOSE/moose,joshua-cogliati-inl/moose,tonkmr/moose,WilkAndy/moose,andrsd/moose,capitalaslash/moose,YaqiWang/moose,jhbradley/moose,xy515258/moose,apc-llc/moose,mellis13/moose,harterj/moose,lindsayad/moose,danielru/moose,idaholab/moose,liuwenf/moose,roystgnr/moose,katyhuff/moose,apc-llc/moose,katyhuff/moose,liuwenf/moose,stimpsonsg/moose,laagesen/moose,idaholab/moose,capitalaslash/moose,cpritam/moose,kasra83/moose,bwspenc/
moose,markr622/moose,andrsd/moose,jhbradley/moose,milljm/moose,backmari/moose,dschwen/moose,giopastor/moose,jasondhales/moose,nuclear-wizard/moose,harterj/moose,lindsayad/moose,laagesen/moose,harterj/moose,SudiptaBiswas/moose,danielru/moose,capitalaslash/moose,shanestafford/moose,apc-llc/moose,tonkmr/moose,joshua-cogliati-inl/moose,bwspenc/moose,cpritam/moose,zzyfisherman/moose,jessecarterMOOSE/moose,xy515258/moose,katyhuff/moose,adamLange/moose,SudiptaBiswas/moose,markr622/moose,SudiptaBiswas/moose,liuwenf/moose,Chuban/moose,yipenggao/moose,jinmm1992/moose,stimpsonsg/moose,harterj/moose,waxmanr/moose,permcody/moose,yipenggao/moose,wgapl/moose,markr622/moose,andrsd/moose,jasondhales/moose,waxmanr/moose,jessecarterMOOSE/moose,WilkAndy/moose,danielru/moose,danielru/moose,jhbradley/moose,jinmm1992/moose,waxmanr/moose,wgapl/moose,markr622/moose,friedmud/moose,jiangwen84/moose,WilkAndy/moose,liuwenf/moose,sapitts/moose,YaqiWang/moose,WilkAndy/moose,idaholab/moose,xy515258/moose,bwspenc/moose,roystgnr/moose,jbair34/moose,idaholab/moose,mellis13/moose,Chuban/moose,waxmanr/moose,shanestafford/moose,milljm/moose,bwspenc/moose,jiangwen84/moose,adamLange/moose,jhbradley/moose,WilkAndy/moose,dschwen/moose,milljm/moose,xy515258/moose,dschwen/moose,milljm/moose,liuwenf/moose,apc-llc/moose | from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e']}
Test passes on my machine, dies on others. Skipping for now.
r8830 | from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e'],
SKIP : 'temp'
}
| <commit_before>from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e']}
<commit_msg>Test passes on my machine, dies on others. Skipping for now.
r8830<commit_after> | from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e'],
SKIP : 'temp'
}
| from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e']}
Test passes on my machine, dies on others. Skipping for now.
r8830from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e'],
SKIP : 'temp'
}
| <commit_before>from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e']}
<commit_msg>Test passes on my machine, dies on others. Skipping for now.
r8830<commit_after>from options import *
test = { INPUT : 'catch_release.i',
EXODIFF : ['catch_release_out.e'],
SKIP : 'temp'
}
|
3b4d135556e2aca65050af3d6a7dc0975cd0b53b | amgut/handlers/FAQ.py | amgut/handlers/FAQ.py | from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html')
| from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html', loginerror='')
| Add loginerror blank to render call | Add loginerror blank to render call
| Python | bsd-3-clause | wasade/american-gut-web,mortonjt/american-gut-web,ElDeveloper/american-gut-web,josenavas/american-gut-web,PersonalGenomesOrg/american-gut-web,squirrelo/american-gut-web,squirrelo/american-gut-web,josenavas/american-gut-web,adamrp/american-gut-web,ElDeveloper/american-gut-web,biocore/american-gut-web,mortonjt/american-gut-web,wasade/american-gut-web,biocore/american-gut-web,biocore/american-gut-web,adamrp/american-gut-web,josenavas/american-gut-web,PersonalGenomesOrg/american-gut-web,ElDeveloper/american-gut-web,mortonjt/american-gut-web,adamrp/american-gut-web,squirrelo/american-gut-web,PersonalGenomesOrg/american-gut-web | from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html')
Add loginerror blank to render call | from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html', loginerror='')
| <commit_before>from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html')
<commit_msg>Add loginerror blank to render call<commit_after> | from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html', loginerror='')
| from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html')
Add loginerror blank to render callfrom amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html', loginerror='')
| <commit_before>from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html')
<commit_msg>Add loginerror blank to render call<commit_after>from amgut.handlers.base_handlers import BaseHandler
class FAQHandler(BaseHandler):
def get(self):
self.render('FAQ.html', loginerror='')
|
1113c12e71a45eb7bdac51181d62c990a0eb952e | pyOCD/target/target_k64f.py | pyOCD/target/target_k64f.py | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1ffe0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
| """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1fff0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
| Adjust K64F target file memory configuration to match K64F datasheet. | Adjust K64F target file memory configuration to match K64F datasheet.
| Python | apache-2.0 | geky/pyDAPLink,bridadan/pyOCD,flit/pyOCD,tgarc/pyOCD,pyocd/pyOCD,geky/pyOCD,c1728p9/pyOCD,tgarc/pyOCD,mbedmicro/pyOCD,wjzhang/pyOCD,0xc0170/pyOCD,bridadan/pyOCD,oliviermartin/pyOCD,flit/pyOCD,matthewelse/pyOCD,mesheven/pyOCD,pyocd/pyOCD,oliviermartin/pyOCD,mbedmicro/pyOCD,adamgreen/pyOCD,matthewelse/pyOCD,mesheven/pyOCD,molejar/pyOCD,tgarc/pyOCD,mesheven/pyOCD,wjzhang/pyOCD,c1728p9/pyOCD,oliviermartin/pyOCD,0xc0170/pyOCD,bridadan/pyOCD,molejar/pyOCD,adamgreen/pyOCD,devanlai/pyOCD,matthewelse/pyOCD,adamgreen/pyOCD,molejar/pyOCD,geky/pyOCDgdb,devanlai/pyOCD,0xc0170/pyOCD,mbedmicro/pyOCD,wjzhang/pyOCD,geky/pyOCD,devanlai/pyOCD,c1728p9/pyOCD | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1ffe0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
Adjust K64F target file memory configuration to match K64F datasheet. | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1fff0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
| <commit_before>"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1ffe0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
<commit_msg>Adjust K64F target file memory configuration to match K64F datasheet.<commit_after> | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1fff0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
| """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1ffe0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
Adjust K64F target file memory configuration to match K64F datasheet."""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1fff0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
| <commit_before>"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1ffe0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
<commit_msg>Adjust K64F target file memory configuration to match K64F datasheet.<commit_after>"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from target_kinetis import Kinetis
import logging
class K64F(Kinetis):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x0" length="0x100000"> <property name="blocksize">0x1000</property></memory>
<memory type="ram" start="0x1fff0000" length="0x40000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(K64F, self).__init__(transport)
self.auto_increment_page_size = 0x400
self.mdm_idr = 0x001c0000
|
ba008f405a89d07e170d1b4c893246fb25ccba04 | benchmarks/benchmarks/bench_lib.py | benchmarks/benchmarks/bench_lib.py | """Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
| """Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
| Make the pad benchmark pagefault in setup | BENCH: Make the pad benchmark pagefault in setup
| Python | bsd-3-clause | shoyer/numpy,grlee77/numpy,mhvk/numpy,pbrod/numpy,endolith/numpy,WarrenWeckesser/numpy,mhvk/numpy,pbrod/numpy,jakirkham/numpy,mattip/numpy,anntzer/numpy,abalkin/numpy,shoyer/numpy,WarrenWeckesser/numpy,madphysicist/numpy,MSeifert04/numpy,madphysicist/numpy,endolith/numpy,mhvk/numpy,mattip/numpy,pizzathief/numpy,seberg/numpy,grlee77/numpy,jakirkham/numpy,pbrod/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,shoyer/numpy,MSeifert04/numpy,jorisvandenbossche/numpy,numpy/numpy,ahaldane/numpy,pdebuyl/numpy,numpy/numpy,ahaldane/numpy,charris/numpy,endolith/numpy,ahaldane/numpy,simongibbons/numpy,pdebuyl/numpy,pdebuyl/numpy,shoyer/numpy,mhvk/numpy,charris/numpy,pbrod/numpy,WarrenWeckesser/numpy,jorisvandenbossche/numpy,simongibbons/numpy,MSeifert04/numpy,pizzathief/numpy,grlee77/numpy,rgommers/numpy,MSeifert04/numpy,madphysicist/numpy,abalkin/numpy,charris/numpy,abalkin/numpy,anntzer/numpy,seberg/numpy,pdebuyl/numpy,numpy/numpy,pizzathief/numpy,jorisvandenbossche/numpy,pizzathief/numpy,simongibbons/numpy,MSeifert04/numpy,jakirkham/numpy,pbrod/numpy,madphysicist/numpy,anntzer/numpy,WarrenWeckesser/numpy,charris/numpy,jorisvandenbossche/numpy,jorisvandenbossche/numpy,grlee77/numpy,shoyer/numpy,anntzer/numpy,endolith/numpy,grlee77/numpy,simongibbons/numpy,ahaldane/numpy,mattip/numpy,mhvk/numpy,simongibbons/numpy,seberg/numpy,numpy/numpy,pizzathief/numpy,seberg/numpy,ahaldane/numpy,WarrenWeckesser/numpy,jakirkham/numpy,madphysicist/numpy,rgommers/numpy,rgommers/numpy | """Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
BENCH: Make the pad benchmark pagefault in setup | """Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
| <commit_before>"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
<commit_msg>BENCH: Make the pad benchmark pagefault in setup<commit_after> | """Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
| """Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
BENCH: Make the pad benchmark pagefault in setup"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
| <commit_before>"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,), (10, 100), (10, 10, 10)],
[1, 3, (0, 5)],
["constant", "edge", "linear_ramp", "mean", "reflect", "wrap"],
]
def setup(self, shape, pad_width, mode):
# avoid np.zeros or np.empty's lazy allocation.
# np.full causes pagefaults to occur during setup
# instead of during the benchmark
self.array = np.full(shape, 0)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
<commit_msg>BENCH: Make the pad benchmark pagefault in setup<commit_after>"""Benchmarks for `numpy.lib`."""
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Pad(Benchmark):
"""Benchmarks for `numpy.pad`."""
param_names = ["shape", "pad_width", "mode"]
params = [
[(1000,),
(10, 100),
(10, 10, 10),
# 50 * 512 * 512 = 13 million points = 46 MB. should be a good
# out of cache describing a typical usecase
(50, 512, 512)],
[1,
3,
(0, 5)],
["constant",
"edge", "linear_ramp",
# mean/median/minimum/maximum all use the same code path
"mean",
# reflect/symmetric share alot of the code path
"reflect",
"wrap"],
]
def setup(self, shape, pad_width, mode):
# Make sure to fill the array to make the OS page fault
# in the setup phase and not the timed phase
self.array = np.full(shape, fill_value=1)
def time_pad(self, shape, pad_width, mode):
np.pad(self.array, pad_width, mode)
|
0701f7f4d03045a49190d8aac172daed467ebcd7 | python/xchainer/__init__.py | python/xchainer/__init__.py | from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
| from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
| Remove line added by bad merge | Remove line added by bad merge
| Python | mit | niboshi/chainer,keisuke-umezawa/chainer,jnishi/chainer,wkentaro/chainer,ktnyt/chainer,chainer/chainer,jnishi/chainer,niboshi/chainer,okuta/chainer,chainer/chainer,tkerola/chainer,jnishi/chainer,chainer/chainer,wkentaro/chainer,hvy/chainer,keisuke-umezawa/chainer,pfnet/chainer,ktnyt/chainer,ktnyt/chainer,niboshi/chainer,wkentaro/chainer,okuta/chainer,wkentaro/chainer,ktnyt/chainer,hvy/chainer,hvy/chainer,okuta/chainer,okuta/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,niboshi/chainer,hvy/chainer,jnishi/chainer | from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
Remove line added by bad merge | from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
| <commit_before>from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
<commit_msg>Remove line added by bad merge<commit_after> | from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
| from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
Remove line added by bad mergefrom xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
| <commit_before>from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
set_default_device('native')
<commit_msg>Remove line added by bad merge<commit_after>from xchainer._core import * # NOQA
_global_context = Context()
set_global_default_context(_global_context)
|
703a423f4a0aeda7cbeaa542e2f4e0581eee3bda | slot/utils.py | slot/utils.py | import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
"""Converts ticks to a timestamp"""
converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700)
return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:]) | import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:]) | Add timezone support to timestamp helper methods | Add timezone support to timestamp helper methods
| Python | mit | nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT | import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
"""Converts ticks to a timestamp"""
converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700)
return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])Add timezone support to timestamp helper methods | import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:]) | <commit_before>import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
"""Converts ticks to a timestamp"""
converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700)
return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])<commit_msg>Add timezone support to timestamp helper methods<commit_after> | import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:]) | import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
"""Converts ticks to a timestamp"""
converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700)
return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])Add timezone support to timestamp helper methodsimport datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:]) | <commit_before>import datetime
def to_ticks(dt):
"""Converts a timestamp to ticks"""
return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def ticks_to_timestamp(ticks):
    """Convert ticks (seconds since the Unix Epoch) to a datetime.

    Bug fix: the offset previously used a hard-coded 3700 seconds instead
    of the *ticks* argument, so every input produced the same timestamp.
    """
    converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=ticks)
    return converted
def ticks_now():
"""Returns the current timestamp in ticks"""
return int(to_ticks(datetime.datetime.utcnow()))
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:])<commit_msg>Add timezone support to timestamp helper methods<commit_after>import datetime
import pytz
this_timezone = pytz.timezone('Europe/London')
def timestamp_to_ticks(dt):
"""Converts a datetime to ticks (seconds since Epoch)"""
delta = (dt - datetime.datetime(1970, 1, 1))
ticks = int(delta.total_seconds())
return ticks
def ticks_to_timestamp(ticks):
"""Converts ticks (seconds since Epoch) to a datetime"""
delta = datetime.timedelta(seconds=ticks)
new_timestamp = datetime.datetime(1970, 1, 1) + delta
return new_timestamp
def ticks_utc_now():
"""Returns the current timestamp in ticks"""
time_now = datetime.datetime.utcnow()
ticks = int(timestamp_to_ticks(time_now))
return ticks
def ticks_local_now():
time_now = datetime.datetime.now(tz=this_timezone)
ticks = int(timestamp_to_ticks(time_now))
return ticks
def mobile_number_string_to_int(mobile_string):
"""Converts mobile numbers from a string to an integer"""
return int(mobile_string)
def redact_mobile_number(mobile_string):
"""Takes a mobile number as a string, and redacts all but the last 3 digits"""
return str.format('XXXXX XXX{0}', mobile_string[-3:]) |
430da3ea616a1118af0b043952bc9d9554086c7f | tpdatasrc/co8fixes/scr/py00248tutorial_room_7.py | tpdatasrc/co8fixes/scr/py00248tutorial_room_7.py | from toee import *
from combat_standard_routines import *
from utilities import *
def san_heartbeat( attachee, triggerer ):
	# Heartbeat for tutorial room 7: watch for a conscious PC approaching.
	for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
		if (critter_is_unconscious(obj) == 0):
			if attachee.distance_to( obj ) < 30:
				# Make sure the tutorial UI is on before showing the topic.
				if not game.tutorial_is_active():
					game.tutorial_toggle()
				game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
				# Flag 6 records that the room-7 overview has been shown.
				game.global_flags[6] = 1
				# Detach this script so the heartbeat never fires again.
				game.new_sid = 0
	return RUN_DEFAULT
	return RUN_DEFAULT
from utilities import *
def correct_zombie_factions():
	# Ensure every NPC near the fixed map location (464, 487) belongs to
	# faction 7, skipping any NPC that has joined the player's party.
	for obj in game.obj_list_vicinity(location_from_axis(464, 487) ,OLC_NPC):
		if obj in game.party:
			continue
		if obj.faction_has(7) == 0:
			obj.faction_add(7)
	return
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
correct_zombie_factions()
return RUN_DEFAULT
return RUN_DEFAULT | Fix tutorial zombie faction issue (co8) | Fix tutorial zombie faction issue (co8)
| Python | mit | GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus | from toee import *
from combat_standard_routines import *
from utilities import *
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
return RUN_DEFAULT
return RUN_DEFAULTFix tutorial zombie faction issue (co8) | from toee import *
from utilities import *
def correct_zombie_factions():
for obj in game.obj_list_vicinity(location_from_axis(464, 487) ,OLC_NPC):
if obj in game.party:
continue
if obj.faction_has(7) == 0:
obj.faction_add(7)
return
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
correct_zombie_factions()
return RUN_DEFAULT
return RUN_DEFAULT | <commit_before>from toee import *
from combat_standard_routines import *
from utilities import *
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
return RUN_DEFAULT
return RUN_DEFAULT<commit_msg>Fix tutorial zombie faction issue (co8)<commit_after> | from toee import *
from utilities import *
def correct_zombie_factions():
for obj in game.obj_list_vicinity(location_from_axis(464, 487) ,OLC_NPC):
if obj in game.party:
continue
if obj.faction_has(7) == 0:
obj.faction_add(7)
return
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
correct_zombie_factions()
return RUN_DEFAULT
return RUN_DEFAULT | from toee import *
from combat_standard_routines import *
from utilities import *
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
return RUN_DEFAULT
return RUN_DEFAULTFix tutorial zombie faction issue (co8)from toee import *
from utilities import *
def correct_zombie_factions():
for obj in game.obj_list_vicinity(location_from_axis(464, 487) ,OLC_NPC):
if obj in game.party:
continue
if obj.faction_has(7) == 0:
obj.faction_add(7)
return
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
correct_zombie_factions()
return RUN_DEFAULT
return RUN_DEFAULT | <commit_before>from toee import *
from combat_standard_routines import *
from utilities import *
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
return RUN_DEFAULT
return RUN_DEFAULT<commit_msg>Fix tutorial zombie faction issue (co8)<commit_after>from toee import *
from utilities import *
def correct_zombie_factions():
for obj in game.obj_list_vicinity(location_from_axis(464, 487) ,OLC_NPC):
if obj in game.party:
continue
if obj.faction_has(7) == 0:
obj.faction_add(7)
return
def san_heartbeat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (critter_is_unconscious(obj) == 0):
if attachee.distance_to( obj ) < 30:
if not game.tutorial_is_active():
game.tutorial_toggle()
game.tutorial_show_topic( TAG_TUT_ROOM7_OVERVIEW )
game.global_flags[6] = 1
game.new_sid = 0
correct_zombie_factions()
return RUN_DEFAULT
return RUN_DEFAULT |
d4534c6a6088f377183995be1e098197b650a852 | faker/providers/de_DE/internet.py | faker/providers/de_DE/internet.py | # coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
    """German (de_DE) internet provider: localized e-mail domains and TLDs."""

    free_email_domains = (
        'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
        'aol.de', 'gmx.de'
    )
    tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')

    @staticmethod
    def _to_ascii(string):
        # Transliterate German umlauts and sharp s so generated user names
        # and domain words are plain ASCII.
        # Bug fix: the capital-Ö mapping previously read ('O', 'Oe'), which
        # rewrote every plain capital O; it must match 'Ö'.
        replacements = (
            ('ä', 'ae'), ('Ä', 'Ae'),
            ('ö', 'oe'), ('Ö', 'Oe'),
            ('ü', 'ue'), ('Ü', 'Ue'),
            ('ß', 'ss')
        )
        for search, replace in replacements:
            string = string.replace(search, replace)
        return string

    def user_name(self):
        """Generate a lower-case, ASCII-only user name."""
        pattern = self.random_element(self.user_name_formats)
        return self._to_ascii(
            self.bothify(self.generator.parse(pattern)
            ).lower())

    def domain_word(self):
        """Derive a lower-case ASCII domain word from the first word of a company name."""
        company = self.generator.format('company')
        company_elements = company.split(' ')
        company = self._to_ascii(company_elements.pop(0))
        return re.sub(r'\W', '', company).lower()
| # coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('Ö', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
| Fix capital O missing umlaut | Fix capital O missing umlaut | Python | mit | HAYASAKA-Ryosuke/faker,jaredculp/faker,joke2k/faker,MaryanMorel/faker,johnraz/faker,venmo/faker,xfxf/faker-python,xfxf/faker-1,meganlkm/faker,yiliaofan/faker,GLMeece/faker,joke2k/faker,beetleman/faker,danhuss/faker,thedrow/faker,trtd/faker,ericchaves/faker | # coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
    """German (de_DE) internet provider: localized e-mail domains and TLDs."""

    free_email_domains = (
        'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
        'aol.de', 'gmx.de'
    )
    tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')

    @staticmethod
    def _to_ascii(string):
        # Transliterate German umlauts and sharp s so generated user names
        # and domain words are plain ASCII.
        # Bug fix: the capital-Ö mapping previously read ('O', 'Oe'), which
        # rewrote every plain capital O; it must match 'Ö'.
        replacements = (
            ('ä', 'ae'), ('Ä', 'Ae'),
            ('ö', 'oe'), ('Ö', 'Oe'),
            ('ü', 'ue'), ('Ü', 'Ue'),
            ('ß', 'ss')
        )
        for search, replace in replacements:
            string = string.replace(search, replace)
        return string

    def user_name(self):
        """Generate a lower-case, ASCII-only user name."""
        pattern = self.random_element(self.user_name_formats)
        return self._to_ascii(
            self.bothify(self.generator.parse(pattern)
            ).lower())

    def domain_word(self):
        """Derive a lower-case ASCII domain word from the first word of a company name."""
        company = self.generator.format('company')
        company_elements = company.split(' ')
        company = self._to_ascii(company_elements.pop(0))
        return re.sub(r'\W', '', company).lower()
Fix capital O missing umlaut | # coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('Ö', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
| <commit_before># coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('O', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
<commit_msg>Fix capital O missing umlaut<commit_after> | # coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('Ö', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
| # coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('O', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
Fix capital O missing umlaut# coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('Ö', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
| <commit_before># coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('O', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
<commit_msg>Fix capital O missing umlaut<commit_after># coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
import re
class Provider(InternetProvider):
free_email_domains = (
'web.de', 'gmail.com', 'hotmail.de', 'yahoo.de', 'googlemail.com',
'aol.de', 'gmx.de'
)
tlds = ('com', 'com', 'com', 'net', 'org', 'de', 'de', 'de')
@staticmethod
def _to_ascii(string):
replacements = (
('ä', 'ae'), ('Ä', 'Ae'),
('ö', 'oe'), ('Ö', 'Oe'),
('ü', 'ue'), ('Ü', 'Ue'),
('ß', 'ss')
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(
self.bothify(self.generator.parse(pattern)
).lower())
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')
company = self._to_ascii(company_elements.pop(0))
return re.sub(r'\W', '', company).lower()
|
b15591a8a232bdc59281aa0c7750bdc32f7e3103 | pupa/importers/jurisdiction.py | pupa/importers/jurisdiction.py | import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
    """Save *jurisdiction* to the database and create its related organizations.

    Validates the database object against the jurisdiction schema, then
    creates a 'legislature' Organization for the jurisdiction itself plus one
    'party' Organization per entry in ``jurisdiction.parties``, all imported
    through *org_importer*.

    Raises ValueError if the jurisdiction id is malformed or the object
    fails schema validation.
    """
    obj = jurisdiction.get_db_object()

    obj['_type'] = 'jurisdiction'
    obj['_id'] = jurisdiction.jurisdiction_id
    # Fail early on ids that do not follow the OCD naming scheme; a bad id
    # would otherwise propagate into every related organization below.
    if not obj['_id'].startswith("ocd-jurisdiction/"):
        raise ValueError("The Jurisdiction appears to have an ID that does not"
                         " begin with 'ocd-jurisdiction'. I found '%s'" % (
                             jurisdiction.jurisdiction_id))
    obj['latest_update'] = datetime.datetime.utcnow()

    # validate jurisdiction (DatetimeValidator raises ValueError on failure;
    # the previous catch-and-reraise added nothing, so let it propagate)
    validator = DatetimeValidator()
    validator.validate(obj, jurisdiction_schema)

    db.jurisdictions.save(obj)

    # create organization(s) (TODO: if there are multiple chambers this isn't right)
    org = Organization(name=jurisdiction.name, classification='legislature',
                       jurisdiction_id=jurisdiction.jurisdiction_id)
    if jurisdiction.other_names:
        org.other_names = jurisdiction.other_names
    if jurisdiction.parent_id:
        org.parent_id = jurisdiction.parent_id
    org_importer.import_object(org)

    # create parties
    for party in jurisdiction.parties:
        org = Organization(**{'classification': 'party',
                              'name': party['name'],
                              'parent_id': None})
        org_importer.import_object(org)
| import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
if not obj['_id'].startswith("ocd-jurisdiction/"):
raise ValueError("The Jurisdiction appears to have an ID that does not"
" begin with 'ocd-jurisdiction'. I found '%s'" % (
jurisdiction.jurisdiction_id))
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
| Throw a ValueError if we get a non-JID for the JID | Throw a ValueError if we get a non-JID for the JID
| Python | bsd-3-clause | datamade/pupa,rshorey/pupa,influence-usa/pupa,mileswwatkins/pupa,opencivicdata/pupa,rshorey/pupa,mileswwatkins/pupa,datamade/pupa,influence-usa/pupa,opencivicdata/pupa | import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
Throw a ValueError if we get a non-JID for the JID | import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
if not obj['_id'].startswith("ocd-jurisdiction/"):
raise ValueError("The Jurisdiction appears to have an ID that does not"
" begin with 'ocd-jurisdiction'. I found '%s'" % (
jurisdiction.jurisdiction_id))
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
| <commit_before>import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
<commit_msg>Throw a ValueError if we get a non-JID for the JID<commit_after> | import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
if not obj['_id'].startswith("ocd-jurisdiction/"):
raise ValueError("The Jurisdiction appears to have an ID that does not"
" begin with 'ocd-jurisdiction'. I found '%s'" % (
jurisdiction.jurisdiction_id))
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
| import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
Throw a ValueError if we get a non-JID for the JIDimport os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
if not obj['_id'].startswith("ocd-jurisdiction/"):
raise ValueError("The Jurisdiction appears to have an ID that does not"
" begin with 'ocd-jurisdiction'. I found '%s'" % (
jurisdiction.jurisdiction_id))
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
| <commit_before>import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
<commit_msg>Throw a ValueError if we get a non-JID for the JID<commit_after>import os
import json
import datetime
from pupa.core import db
from pupa.models import Organization
from pupa.models.utils import DatetimeValidator
from pupa.models.schemas.jurisdiction import schema as jurisdiction_schema
def import_jurisdiction(org_importer, jurisdiction):
obj = jurisdiction.get_db_object()
obj['_type'] = 'jurisdiction'
obj['_id'] = jurisdiction.jurisdiction_id
if not obj['_id'].startswith("ocd-jurisdiction/"):
raise ValueError("The Jurisdiction appears to have an ID that does not"
" begin with 'ocd-jurisdiction'. I found '%s'" % (
jurisdiction.jurisdiction_id))
obj['latest_update'] = datetime.datetime.utcnow()
# validate jurisdiction
validator = DatetimeValidator()
try:
validator.validate(obj, jurisdiction_schema)
except ValueError as ve:
raise ve
db.jurisdictions.save(obj)
# create organization(s) (TODO: if there are multiple chambers this isn't right)
org = Organization(name=jurisdiction.name, classification='legislature',
jurisdiction_id=jurisdiction.jurisdiction_id)
if jurisdiction.other_names:
org.other_names = jurisdiction.other_names
if jurisdiction.parent_id:
org.parent_id = jurisdiction.parent_id
org_importer.import_object(org)
# create parties
for party in jurisdiction.parties:
org = Organization(**{'classification': 'party',
'name': party['name'],
'parent_id': None})
org_importer.import_object(org)
|
528c10b3988a93668c6a0d4c0b8a7de2667204b1 | frontend/ligscore/results_page.py | frontend/ligscore/results_page.py | from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
    """Render the results page for a completed ligscore *job*.

    The 'from' and 'to' query parameters select the 1-based range of
    transforms to display (defaults 1-20); input.txt supplies the
    receptor, ligand and score type, and score.list supplies one score
    per non-blank line (last whitespace-separated field).
    """
    # werkzeug's type= coercion already falls back to the default when the
    # parameter is missing or not a valid integer, so the hand-rolled
    # get_int() helper is unnecessary.
    show_from = request.args.get('from', 1, type=int)
    show_to = request.args.get('to', 20, type=int)
    # First line of input.txt is "receptor ligand scoretype".
    with open(job.get_path('input.txt')) as fh:
        receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
    num_transforms = 0
    transforms = []
    with open(job.get_path('score.list')) as fh:
        for line in fh:
            spl = line.rstrip('\r\n').split()
            if len(spl) > 0:  # skip blank lines
                num_transforms += 1
                # Keep only the displayed window, but keep counting so the
                # template can report the total number of transforms.
                if num_transforms >= show_from and num_transforms <= show_to:
                    transforms.append(Transform(number=num_transforms,
                                                score="%.2f" % float(spl[-1])))
    return saliweb.frontend.render_results_template(
        "results_ok.html",
        receptor=receptor, ligand=ligand, scoretype=scoretype,
        transforms=transforms, show_from=show_from, show_to=show_to,
        num_transforms=num_transforms, job=job)
def get_int(name, default):
    """Return query parameter *name* as an int, or *default* when the
    parameter is absent or not a valid integer."""
    raw = request.args.get(name, "")
    try:
        value = int(raw)
    except ValueError:
        value = default
    return value
| from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
    """Render the results page for a completed ligscore *job*.

    Reads the job inputs from input.txt and the per-transform scores from
    score.list, then renders results_ok.html showing the transforms in the
    1-based range selected by the 'from'/'to' query parameters
    (defaults 1-20).
    """
    # werkzeug's type= coercion returns the default when the parameter is
    # missing or not a valid integer.
    show_from = request.args.get('from', 1, type=int)
    show_to = request.args.get('to', 20, type=int)
    # First line of input.txt is "receptor ligand scoretype".
    with open(job.get_path('input.txt')) as fh:
        receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
    num_transforms = 0
    transforms = []
    with open(job.get_path('score.list')) as fh:
        for line in fh:
            spl = line.rstrip('\r\n').split()
            if len(spl) > 0:  # skip blank lines
                num_transforms += 1
                # Keep only transforms inside the displayed window, but keep
                # counting so the template can report the overall total.
                # The score is the last whitespace-separated field.
                if num_transforms >= show_from and num_transforms <= show_to:
                    transforms.append(Transform(number=num_transforms,
                                                score="%.2f" % float(spl[-1])))
    return saliweb.frontend.render_results_template(
        "results_ok.html",
        receptor=receptor, ligand=ligand, scoretype=scoretype,
        transforms=transforms, show_from=show_from, show_to=show_to,
        num_transforms=num_transforms, job=job)
| Drop our own get_int() function | Drop our own get_int() function
We don't need a custom function to get an int parameter;
flask/werkzeug already handles this.
| Python | lgpl-2.1 | salilab/ligscore,salilab/ligscore | from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = get_int('from', 1)
show_to = get_int('to', 20)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
def get_int(name, default):
try:
return int(request.args.get(name, ""))
except ValueError:
return default
Drop our own get_int() function
We don't need a custom function to get an int parameter;
flask/werkzeug already handles this. | from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = request.args.get('from', 1, type=int)
show_to = request.args.get('to', 20, type=int)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
| <commit_before>from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = get_int('from', 1)
show_to = get_int('to', 20)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
def get_int(name, default):
try:
return int(request.args.get(name, ""))
except ValueError:
return default
<commit_msg>Drop our own get_int() function
We don't need a custom function to get an int parameter;
flask/werkzeug already handles this.<commit_after> | from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = request.args.get('from', 1, type=int)
show_to = request.args.get('to', 20, type=int)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
| from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = get_int('from', 1)
show_to = get_int('to', 20)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
def get_int(name, default):
try:
return int(request.args.get(name, ""))
except ValueError:
return default
Drop our own get_int() function
We don't need a custom function to get an int parameter;
flask/werkzeug already handles this.from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = request.args.get('from', 1, type=int)
show_to = request.args.get('to', 20, type=int)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
| <commit_before>from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = get_int('from', 1)
show_to = get_int('to', 20)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
def get_int(name, default):
try:
return int(request.args.get(name, ""))
except ValueError:
return default
<commit_msg>Drop our own get_int() function
We don't need a custom function to get an int parameter;
flask/werkzeug already handles this.<commit_after>from flask import request
import saliweb.frontend
import collections
Transform = collections.namedtuple('Transform', ['number', 'score'])
def show_results_page(job):
show_from = request.args.get('from', 1, type=int)
show_to = request.args.get('to', 20, type=int)
with open(job.get_path('input.txt')) as fh:
receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ')
num_transforms = 0
transforms = []
with open(job.get_path('score.list')) as fh:
for line in fh:
spl = line.rstrip('\r\n').split()
if len(spl) > 0:
num_transforms += 1
if num_transforms >= show_from and num_transforms <= show_to:
transforms.append(Transform(number=num_transforms,
score="%.2f" % float(spl[-1])))
return saliweb.frontend.render_results_template(
"results_ok.html",
receptor=receptor, ligand=ligand, scoretype=scoretype,
transforms=transforms, show_from=show_from, show_to=show_to,
num_transforms=num_transforms, job=job)
|
e11bc2ebc701dd947d3d5734339b4815bbd21fd1 | PythonScript/Helper/Dujing.py | PythonScript/Helper/Dujing.py | # This Python file uses the following encoding: utf-8
import sys
import getopt
import Convert
def usage():
    """Print a short usage message for this script."""
    # Parenthesized call form so the line is valid -- and prints the same
    # text -- under both Python 2 and Python 3 (the original bare
    # `print "..."` statement is a SyntaxError on Python 3).
    print("Usage: to be done.")
def main(argv):
    """Parse command-line options and convert each book file given with
    -b/--book in place (decode UTF-8, run Convert, re-encode UTF-8)."""
    try:
        parsed_opts, remaining = getopt.getopt(argv, "hb:d", ["help", "book="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for flag, value in parsed_opts:
        if flag in ("-h", "--help"):
            usage()
            sys.exit(2)
        elif flag == "-d":
            # Turn on the module-level debug switch.
            global _debug
            _debug = 1
        elif flag in ("-b", "--book"):
            filePath = value
            try:
                with open(filePath, 'r') as book_file:
                    content = book_file.read().decode("utf-8")
                content = Convert.Convert(content)
                with open(filePath, 'w') as book_file:
                    book_file.write(content.encode("utf-8"))
            except IOError:
                print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main(sys.argv[1:]) | # This Python file uses the following encoding: utf-8
import sys
import argparse
import Convert
def main():
    """Entry point: convert the book file named on the command line in
    place (decode UTF-8, run Convert, re-encode UTF-8)."""
    arg_parser = argparse.ArgumentParser(
        description='Generate a classic book with the desired format.')
    arg_parser.add_argument('book', type=str, help='a book file')
    filePath = arg_parser.parse_args().book
    try:
        with open(filePath, 'r') as source:
            raw_text = source.read()
        converted = Convert.Convert(raw_text.decode("utf-8"))
        with open(filePath, 'w') as target:
            target.write(converted.encode("utf-8"))
    except IOError:
        print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
| Use argparse (instead of getopt) to get the usage information | Use argparse (instead of getopt) to get the usage information
| Python | mit | fan-jiang/Dujing | # This Python file uses the following encoding: utf-8
import sys
import getopt
import Convert
def usage():
print "Usage: to be done."
def main(argv):
try:
opts, args = getopt.getopt(argv, "hb:d", ["help", "book="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(2)
elif opt == "-d":
global _debug
_debug = 1
elif opt in ("-b","--book"):
filePath = arg
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main(sys.argv[1:])Use argparse (instead of getopt) to get the usage information | # This Python file uses the following encoding: utf-8
import sys
import argparse
import Convert
def main():
parser = argparse.ArgumentParser(description='Generate a classic book with the desired format.')
parser.add_argument('book', type=str, help='a book file')
args = parser.parse_args()
filePath = args.book
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
| <commit_before># This Python file uses the following encoding: utf-8
import sys
import getopt
import Convert
def usage():
print "Usage: to be done."
def main(argv):
try:
opts, args = getopt.getopt(argv, "hb:d", ["help", "book="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(2)
elif opt == "-d":
global _debug
_debug = 1
elif opt in ("-b","--book"):
filePath = arg
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main(sys.argv[1:])<commit_msg>Use argparse (instead of getopt) to get the usage information<commit_after> | # This Python file uses the following encoding: utf-8
import sys
import argparse
import Convert
def main():
parser = argparse.ArgumentParser(description='Generate a classic book with the desired format.')
parser.add_argument('book', type=str, help='a book file')
args = parser.parse_args()
filePath = args.book
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
| # This Python file uses the following encoding: utf-8
import sys
import getopt
import Convert
def usage():
print "Usage: to be done."
def main(argv):
try:
opts, args = getopt.getopt(argv, "hb:d", ["help", "book="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(2)
elif opt == "-d":
global _debug
_debug = 1
elif opt in ("-b","--book"):
filePath = arg
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main(sys.argv[1:])Use argparse (instead of getopt) to get the usage information# This Python file uses the following encoding: utf-8
import sys
import argparse
import Convert
def main():
parser = argparse.ArgumentParser(description='Generate a classic book with the desired format.')
parser.add_argument('book', type=str, help='a book file')
args = parser.parse_args()
filePath = args.book
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
| <commit_before># This Python file uses the following encoding: utf-8
import sys
import getopt
import Convert
def usage():
print "Usage: to be done."
def main(argv):
try:
opts, args = getopt.getopt(argv, "hb:d", ["help", "book="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(2)
elif opt == "-d":
global _debug
_debug = 1
elif opt in ("-b","--book"):
filePath = arg
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main(sys.argv[1:])<commit_msg>Use argparse (instead of getopt) to get the usage information<commit_after># This Python file uses the following encoding: utf-8
import sys
import argparse
import Convert
def main():
parser = argparse.ArgumentParser(description='Generate a classic book with the desired format.')
parser.add_argument('book', type=str, help='a book file')
args = parser.parse_args()
filePath = args.book
try:
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert.Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
29e1c2e30d284e1992bae59fe522c31b4e627f0d | dataset/dataset/pipelines.py | dataset/dataset/pipelines.py | # Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
    """Scrapy item pipeline for scraped dataset records.

    Currently a pass-through; hook point for later cleaning/validation.
    """

    def process_item(self, item, spider):
        # Return the item unchanged so it continues down the pipeline.
        return item
| import re
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
    """Scrapy item pipeline that trims scraped 'name' and 'frequency'
    fields down to their leading regex-matched text, substituting a fixed
    placeholder string when the field is empty."""

    # Leading run of words, numbers, dashes and parenthesized tokens in a title.
    title_regex = re.compile('(((((\\(?[A-Za-z]{1}[-A-Za-z]+,?\\)?)|[-0-9]+)|-)|\\(?[A-Za-z0-9]+\\)?) *)+')
    # Leading run of capitalized words.
    frequency_regex = re.compile('([A-Z]{1}[a-z]+ *)+')

    def process_item(self, item, spider):
        """Clean *item*'s 'name' and 'frequency' fields in place and return it."""
        name_values = item['name']
        if name_values:
            # ASCII-fold the first scraped value, then keep the matched prefix.
            matched = self.title_regex.search(name_values[0].encode('ascii', 'ignore'))
            item['name'] = matched.group()
        else:
            item['name'] = 'Dataset Title Regex Matching Unsuccessful'
        freq_values = item['frequency']
        if freq_values:
            matched = self.frequency_regex.search(freq_values[0].encode('ascii','ignore'))
            item['frequency'] = matched.group()
        else:
            item['frequency'] = 'Dataset Frequency Attribute Regex Matching Unsuccessful'
        return item
| Convert item processing to pipeline module | Convert item processing to pipeline module
| Python | mit | MaxLikelihood/CODE | # Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
def process_item(self, item, spider):
return item
Convert item processing to pipeline module | import re
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
title_regex = re.compile('(((((\\(?[A-Za-z]{1}[-A-Za-z]+,?\\)?)|[-0-9]+)|-)|\\(?[A-Za-z0-9]+\\)?) *)+')
frequency_regex = re.compile('([A-Z]{1}[a-z]+ *)+')
def process_item(self, item, spider):
if item['name']:
item['name'] = self.title_regex.search(item['name'][0].encode('ascii', 'ignore')).group()
else:
item['name'] = 'Dataset Title Regex Matching Unsuccessful'
if item['frequency']:
item['frequency'] = self.frequency_regex.search(item['frequency'][0].encode('ascii','ignore')).group()
else:
item['frequency'] = 'Dataset Frequency Attribute Regex Matching Unsuccessful'
return item
| <commit_before># Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
def process_item(self, item, spider):
return item
<commit_msg>Convert item processing to pipeline module<commit_after> | import re
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
title_regex = re.compile('(((((\\(?[A-Za-z]{1}[-A-Za-z]+,?\\)?)|[-0-9]+)|-)|\\(?[A-Za-z0-9]+\\)?) *)+')
frequency_regex = re.compile('([A-Z]{1}[a-z]+ *)+')
def process_item(self, item, spider):
if item['name']:
item['name'] = self.title_regex.search(item['name'][0].encode('ascii', 'ignore')).group()
else:
item['name'] = 'Dataset Title Regex Matching Unsuccessful'
if item['frequency']:
item['frequency'] = self.frequency_regex.search(item['frequency'][0].encode('ascii','ignore')).group()
else:
item['frequency'] = 'Dataset Frequency Attribute Regex Matching Unsuccessful'
return item
| # Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
def process_item(self, item, spider):
return item
Convert item processing to pipeline moduleimport re
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
title_regex = re.compile('(((((\\(?[A-Za-z]{1}[-A-Za-z]+,?\\)?)|[-0-9]+)|-)|\\(?[A-Za-z0-9]+\\)?) *)+')
frequency_regex = re.compile('([A-Z]{1}[a-z]+ *)+')
def process_item(self, item, spider):
if item['name']:
item['name'] = self.title_regex.search(item['name'][0].encode('ascii', 'ignore')).group()
else:
item['name'] = 'Dataset Title Regex Matching Unsuccessful'
if item['frequency']:
item['frequency'] = self.frequency_regex.search(item['frequency'][0].encode('ascii','ignore')).group()
else:
item['frequency'] = 'Dataset Frequency Attribute Regex Matching Unsuccessful'
return item
| <commit_before># Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
def process_item(self, item, spider):
return item
<commit_msg>Convert item processing to pipeline module<commit_after>import re
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class DatasetPipeline(object):
title_regex = re.compile('(((((\\(?[A-Za-z]{1}[-A-Za-z]+,?\\)?)|[-0-9]+)|-)|\\(?[A-Za-z0-9]+\\)?) *)+')
frequency_regex = re.compile('([A-Z]{1}[a-z]+ *)+')
def process_item(self, item, spider):
if item['name']:
item['name'] = self.title_regex.search(item['name'][0].encode('ascii', 'ignore')).group()
else:
item['name'] = 'Dataset Title Regex Matching Unsuccessful'
if item['frequency']:
item['frequency'] = self.frequency_regex.search(item['frequency'][0].encode('ascii','ignore')).group()
else:
item['frequency'] = 'Dataset Frequency Attribute Regex Matching Unsuccessful'
return item
|
80162fd636cea87b9d096d6df8b93c59887d8785 | scripts/crontab/gen-cron.py | scripts/crontab/gen-cron.py | #!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
    """Parse command-line options and print a rendered crontab to stdout.

    Substitutes the computed commands into the crontab.tpl template read
    at module import time.
    """
    parser = OptionParser()
    parser.add_option("-z", "--zamboni",
                      help="Location of zamboni (required)")
    parser.add_option("-u", "--user",
                      help=("Prefix cron with this user. "
                            "Only define for cron.d style crontabs"))
    parser.add_option("-p", "--python", default="/usr/bin/python2.7",
                      help="Python interpreter to use")
    parser.add_option("-d", "--deprecations", default=False,
                      help="Show deprecation warnings")
    (opts, args) = parser.parse_args()
    if not opts.zamboni:
        parser.error("-z must be defined")
    if not opts.deprecations:
        # Silence DeprecationWarning noise in cron output unless requested.
        opts.python += ' -W ignore::DeprecationWarning'
    # 'django' runs manage.py from the zamboni checkout; 'z_cron' invokes
    # its "cron" management command.
    ctx = {'django': 'cd %s; %s manage.py' % (opts.zamboni, opts.python)}
    ctx['z_cron'] = '%s cron' % ctx['django']
    if opts.user:
        # cron.d-style crontabs carry a user column before each command.
        # (iteritems: this script targets Python 2.)
        for k, v in ctx.iteritems():
            ctx[k] = '%s %s' % (opts.user, v)
    # Needs to stay below the opts.user injection.
    ctx['python'] = opts.python
    print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
    """Build the crontab rendering context from CLI options and print the
    substituted crontab.tpl template to stdout."""
    parser = OptionParser()
    parser.add_option("-z", "--zamboni",
                      help="Location of zamboni (required)")
    parser.add_option("-u", "--user",
                      help=("Prefix cron with this user. "
                            "Only define for cron.d style crontabs"))
    parser.add_option("-p", "--python", default="/usr/bin/python2.7",
                      help="Python interpreter to use")
    parser.add_option("-d", "--deprecations", default=False,
                      help="Show deprecation warnings")
    options, _unused_args = parser.parse_args()

    if not options.zamboni:
        parser.error("-z must be defined")
    if not options.deprecations:
        # Silence DeprecationWarning noise in cron output unless requested.
        options.python += ' -W ignore::DeprecationWarning'

    # Every manage.py invocation goes through the dogwrap wrapper binary
    # (per the change intent: DataDog monitoring of cron runs).
    dogwrap_path = '/usr/local/bin/amo_cron_dogwrap'
    django_cmd = "cd %s; %s %s manage.py" % (options.zamboni,
                                             dogwrap_path,
                                             options.python)
    ctx = {"django": django_cmd}
    ctx['z_cron'] = '%s cron' % ctx['django']

    if options.user:
        # cron.d-style crontabs carry a user column before each command.
        for key, command in ctx.iteritems():
            ctx[key] = '%s %s' % (options.user, command)

    # Needs to stay below the user-prefix injection above.
    ctx['python'] = options.python
    print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
| Add DataDog monitoring to cron job runs | Add DataDog monitoring to cron job runs
| Python | bsd-3-clause | mozilla/addons-server,diox/olympia,mozilla/addons-server,kumar303/olympia,wagnerand/addons-server,atiqueahmedziad/addons-server,eviljeff/olympia,eviljeff/olympia,wagnerand/addons-server,kumar303/addons-server,wagnerand/addons-server,bqbn/addons-server,psiinon/addons-server,bqbn/addons-server,kumar303/olympia,diox/olympia,psiinon/addons-server,bqbn/addons-server,wagnerand/olympia,psiinon/addons-server,mozilla/addons-server,mozilla/olympia,mozilla/addons-server,atiqueahmedziad/addons-server,kumar303/olympia,diox/olympia,wagnerand/addons-server,wagnerand/olympia,eviljeff/olympia,kumar303/olympia,atiqueahmedziad/addons-server,kumar303/addons-server,psiinon/addons-server,diox/olympia,kumar303/addons-server,aviarypl/mozilla-l10n-addons-server,mozilla/olympia,mozilla/olympia,atiqueahmedziad/addons-server,bqbn/addons-server,wagnerand/olympia,kumar303/addons-server,aviarypl/mozilla-l10n-addons-server,mozilla/olympia,aviarypl/mozilla-l10n-addons-server,eviljeff/olympia,wagnerand/olympia,aviarypl/mozilla-l10n-addons-server | #!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
ctx = {'django': 'cd %s; %s manage.py' % (opts.zamboni, opts.python)}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
Add DataDog monitoring to cron job runs | #!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
dogwrap_path = '/usr/local/bin/amo_cron_dogwrap'
ctx = {
"django": "cd %s; %s %s manage.py" % (opts.zamboni,
dogwrap_path,
opts.python)
}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
ctx = {'django': 'cd %s; %s manage.py' % (opts.zamboni, opts.python)}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
<commit_msg>Add DataDog monitoring to cron job runs<commit_after> | #!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
dogwrap_path = '/usr/local/bin/amo_cron_dogwrap'
ctx = {
"django": "cd %s; %s %s manage.py" % (opts.zamboni,
dogwrap_path,
opts.python)
}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
ctx = {'django': 'cd %s; %s manage.py' % (opts.zamboni, opts.python)}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
Add DataDog monitoring to cron job runs#!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
dogwrap_path = '/usr/local/bin/amo_cron_dogwrap'
ctx = {
"django": "cd %s; %s %s manage.py" % (opts.zamboni,
dogwrap_path,
opts.python)
}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
ctx = {'django': 'cd %s; %s manage.py' % (opts.zamboni, opts.python)}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
<commit_msg>Add DataDog monitoring to cron job runs<commit_after>#!/usr/bin/env python
import os
from optparse import OptionParser
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-z", "--zamboni",
help="Location of zamboni (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.7",
help="Python interpreter to use")
parser.add_option("-d", "--deprecations", default=False,
help="Show deprecation warnings")
(opts, args) = parser.parse_args()
if not opts.zamboni:
parser.error("-z must be defined")
if not opts.deprecations:
opts.python += ' -W ignore::DeprecationWarning'
dogwrap_path = '/usr/local/bin/amo_cron_dogwrap'
ctx = {
"django": "cd %s; %s %s manage.py" % (opts.zamboni,
dogwrap_path,
opts.python)
}
ctx['z_cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print(TEMPLATE % ctx)
if __name__ == "__main__":
main()
|
9465d3985e6b6bc87b65a2f89f27871c85ca77e3 | c_major.py | c_major.py | from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(0)
s.track2 = s.engine.track(1)
s.track3 = s.engine.track(2)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
| from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.track3 = s.engine.track(3)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
| Use tracks 1-3 instead of 0-2, to make room for reserved track | Use tracks 1-3 instead of 0-2, to make room for reserved track
| Python | unlicense | metrasynth/gallery,metrasynth/gallery,metrasynth/gallery | from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(0)
s.track2 = s.engine.track(1)
s.track3 = s.engine.track(2)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
Use tracks 1-3 instead of 0-2, to make room for reserved track | from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.track3 = s.engine.track(3)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
| <commit_before>from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(0)
s.track2 = s.engine.track(1)
s.track3 = s.engine.track(2)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
<commit_msg>Use tracks 1-3 instead of 0-2, to make room for reserved track<commit_after> | from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.track3 = s.engine.track(3)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
| from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(0)
s.track2 = s.engine.track(1)
s.track3 = s.engine.track(2)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
Use tracks 1-3 instead of 0-2, to make room for reserved trackfrom s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.track3 = s.engine.track(3)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
| <commit_before>from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(0)
s.track2 = s.engine.track(1)
s.track3 = s.engine.track(2)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
<commit_msg>Use tracks 1-3 instead of 0-2, to make room for reserved track<commit_after>from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.track3 = s.engine.track(3)
with s[0, 0]:
s.note1 = s.fm.note_on(n.C4) | s.track1 | s
with s[1, 0]:
s.note2 = s.fm.note_on(n.E4) | s.track2 | s
with s[2, 0]:
s.note3 = s.fm.note_on(n.G4) | s.track3 | s
with s[4, 0]:
s.note1.off() | s
s.note2.off() | s
s.note3.off() | s
if __name__ == '__main__':
play(s)
input()
|
ae77f5d0050167e0ce137ab876d724658c961f3d | forms.py | forms.py | """
UK-specific Form helpers
"""
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
from ie_counties import IE_COUNTY_CHOICES
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
| """
UK-specific Form helpers
"""
from __future__ import absolute_import
from django.contrib.localflavor.ie.ie_counties import IE_COUNTY_CHOICES
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
| Remove all relative imports. We have always been at war with relative imports. | Remove all relative imports. We have always been at war with relative imports.
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@17009 bcc190cf-cafb-0310-a4f2-bffc1f526a37
| Python | bsd-3-clause | martinogden/django-localflavor-ie | """
UK-specific Form helpers
"""
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
from ie_counties import IE_COUNTY_CHOICES
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
Remove all relative imports. We have always been at war with relative imports.
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@17009 bcc190cf-cafb-0310-a4f2-bffc1f526a37 | """
UK-specific Form helpers
"""
from __future__ import absolute_import
from django.contrib.localflavor.ie.ie_counties import IE_COUNTY_CHOICES
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
| <commit_before>"""
UK-specific Form helpers
"""
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
from ie_counties import IE_COUNTY_CHOICES
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
<commit_msg>Remove all relative imports. We have always been at war with relative imports.
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@17009 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after> | """
UK-specific Form helpers
"""
from __future__ import absolute_import
from django.contrib.localflavor.ie.ie_counties import IE_COUNTY_CHOICES
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
| """
UK-specific Form helpers
"""
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
from ie_counties import IE_COUNTY_CHOICES
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
Remove all relative imports. We have always been at war with relative imports.
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@17009 bcc190cf-cafb-0310-a4f2-bffc1f526a37"""
UK-specific Form helpers
"""
from __future__ import absolute_import
from django.contrib.localflavor.ie.ie_counties import IE_COUNTY_CHOICES
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
| <commit_before>"""
UK-specific Form helpers
"""
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
from ie_counties import IE_COUNTY_CHOICES
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
<commit_msg>Remove all relative imports. We have always been at war with relative imports.
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@17009 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>"""
UK-specific Form helpers
"""
from __future__ import absolute_import
from django.contrib.localflavor.ie.ie_counties import IE_COUNTY_CHOICES
from django.forms.fields import Select
class IECountySelect(Select):
"""
A Select widget that uses a list of Irish Counties as its choices.
"""
def __init__(self, attrs=None):
super(IECountySelect, self).__init__(attrs, choices=IE_COUNTY_CHOICES)
|
31961f9cbfd01955fe94d13cd9f6d9a9a84f3485 | server/proposal/__init__.py | server/proposal/__init__.py | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
| from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
from . import image_tasks
| Load image processing tasks on startup | Load image processing tasks on startup
| Python | mit | cityofsomerville/citydash,codeforboston/cornerwise,cityofsomerville/cornerwise,codeforboston/cornerwise,cityofsomerville/cornerwise,cityofsomerville/citydash,codeforboston/cornerwise,codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/cornerwise,cityofsomerville/citydash | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
Load image processing tasks on startup | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
from . import image_tasks
| <commit_before>from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
<commit_msg>Load image processing tasks on startup<commit_after> | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
from . import image_tasks
| from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
Load image processing tasks on startupfrom django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
from . import image_tasks
| <commit_before>from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
<commit_msg>Load image processing tasks on startup<commit_after>from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
from . import event_tasks
from . import image_tasks
|
d409594c01e11e05a59f5614722dd3035855e399 | salt/returners/redis_return.py | salt/returners/redis_return.py | '''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.set(ret['id'] + ':' + ret['jid'], json.dumps(ret['return']))
| '''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.sadd(ret['id'] + 'jobs', ret['jid'])
serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return']))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
| Change the redis returner to better use data structures | Change the redis returner to better use data structures
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.set(ret['id'] + ':' + ret['jid'], json.dumps(ret['return']))
Change the redis returner to better use data structures | '''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.sadd(ret['id'] + 'jobs', ret['jid'])
serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return']))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
| <commit_before>'''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.set(ret['id'] + ':' + ret['jid'], json.dumps(ret['return']))
<commit_msg>Change the redis returner to better use data structures<commit_after> | '''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.sadd(ret['id'] + 'jobs', ret['jid'])
serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return']))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
| '''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.set(ret['id'] + ':' + ret['jid'], json.dumps(ret['return']))
Change the redis returner to better use data structures'''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.sadd(ret['id'] + 'jobs', ret['jid'])
serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return']))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
| <commit_before>'''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.set(ret['id'] + ':' + ret['jid'], json.dumps(ret['return']))
<commit_msg>Change the redis returner to better use data structures<commit_after>'''
Return data to a redis server
This is a VERY simple example for pushing data to a redis server and is not
nessisarily intended as a usable interface.
'''
import redis
import json
__opts__ = {
'redis.host': 'mcp',
'redis.port': 6379,
'redis.db': '0',
}
def returner(ret):
'''
Return data to a redis data store
'''
serv = redis.Redis(
host=__opts__['redis.host'],
port=__opts__['redis.port'],
db=__opts__['redis.db'])
serv.sadd(ret['id'] + 'jobs', ret['jid'])
serv.set(ret['jid'] + ':' + ret['id'], json.dumps(ret['return']))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
|
936bdadb9e949d29a7742b088e0279680afa6c4a | copy_from_find_in_files_command.py | copy_from_find_in_files_command.py | import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = r'^\s*\d+(\:\s|\s{2})'
without_dots = r'^\s*(\d+(\:\s|\s{2})|.+\n)'
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return re.sub(self.construct(), '', text, flags=re.MULTILINE)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
| import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = re.compile('^\s*\d+(\:\s|\s{2})', re.MULTILINE)
without_dots = re.compile('^\s*(\d+(\:\s|\s{2})|.+\n)', re.MULTILINE)
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return self.construct().sub('', text)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
| Make the regex work on Python2.x | Make the regex work on Python2.x
| Python | mit | kema221/sublime-copy-from-find-results,NicoSantangelo/sublime-copy-from-find-results,kema221/sublime-copy-from-find-results | import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = r'^\s*\d+(\:\s|\s{2})'
without_dots = r'^\s*(\d+(\:\s|\s{2})|.+\n)'
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return re.sub(self.construct(), '', text, flags=re.MULTILINE)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
Make the regex work on Python2.x | import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = re.compile('^\s*\d+(\:\s|\s{2})', re.MULTILINE)
without_dots = re.compile('^\s*(\d+(\:\s|\s{2})|.+\n)', re.MULTILINE)
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return self.construct().sub('', text)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
| <commit_before>import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = r'^\s*\d+(\:\s|\s{2})'
without_dots = r'^\s*(\d+(\:\s|\s{2})|.+\n)'
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return re.sub(self.construct(), '', text, flags=re.MULTILINE)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
<commit_msg>Make the regex work on Python2.x<commit_after> | import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = re.compile('^\s*\d+(\:\s|\s{2})', re.MULTILINE)
without_dots = re.compile('^\s*(\d+(\:\s|\s{2})|.+\n)', re.MULTILINE)
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return self.construct().sub('', text)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
| import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = r'^\s*\d+(\:\s|\s{2})'
without_dots = r'^\s*(\d+(\:\s|\s{2})|.+\n)'
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return re.sub(self.construct(), '', text, flags=re.MULTILINE)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
Make the regex work on Python2.ximport sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = re.compile('^\s*\d+(\:\s|\s{2})', re.MULTILINE)
without_dots = re.compile('^\s*(\d+(\:\s|\s{2})|.+\n)', re.MULTILINE)
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return self.construct().sub('', text)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
| <commit_before>import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = r'^\s*\d+(\:\s|\s{2})'
without_dots = r'^\s*(\d+(\:\s|\s{2})|.+\n)'
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return re.sub(self.construct(), '', text, flags=re.MULTILINE)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
<commit_msg>Make the regex work on Python2.x<commit_after>import sublime
import sublime_plugin
import re
class CopyFromFindInFilesCommand(sublime_plugin.TextCommand):
def run(self, edit, force=False):
self.view.run_command('copy')
if not self.in_find_results_view() and not force:
return
clipboard_contents = sublime.get_clipboard()
if clipboard_contents:
settings = sublime.load_settings('CopyFromFindInFiles.sublime-settings')
keep_intermediate_dots = settings.get('keep_intermediate_dots', False)
new_clipboard = RegexStruct(keep_intermediate_dots).sub(clipboard_contents)
sublime.set_clipboard(new_clipboard)
def in_find_results_view(self):
return self.view.settings().get('syntax') == 'Packages/Default/Find Results.hidden-tmLanguage'
class RegexStruct():
default = re.compile('^\s*\d+(\:\s|\s{2})', re.MULTILINE)
without_dots = re.compile('^\s*(\d+(\:\s|\s{2})|.+\n)', re.MULTILINE)
def __init__(self, keep_dots=True):
self.keep_dots = keep_dots
def sub(self, text):
return self.construct().sub('', text)
def construct(self):
return RegexStruct.default if self.keep_dots else RegexStruct.without_dots
|
2a9b8843767963fed13a9bd145aa5835a4e13dce | autocloud/__init__.py | autocloud/__init__.py | # -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
| # -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
default = {'host': '127.0.0.1', 'port': 5000}
config = ConfigParser.RawConfigParser(default)
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host')
PORT = config.getint('autocloud', 'port')
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
| Fix loading of default config values | config: Fix loading of default config values
If RawConfigParser is not able to find config
using get(), it doesn't return None, instead
it raises an Exception.
Signed-off-by: Vivek Anand <6cbec6cb1b0c30c91d3fca6c61ddeb9b64cef11c@gmail.com>
| Python | agpl-3.0 | kushaldas/autocloud,kushaldas/autocloud,kushaldas/autocloud,kushaldas/autocloud | # -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
config: Fix loading of default config values
If RawConfigParser is not able to find config
using get(), it doesn't return None, instead
it raises an Exception.
Signed-off-by: Vivek Anand <6cbec6cb1b0c30c91d3fca6c61ddeb9b64cef11c@gmail.com> | # -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
default = {'host': '127.0.0.1', 'port': 5000}
config = ConfigParser.RawConfigParser(default)
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host')
PORT = config.getint('autocloud', 'port')
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
| <commit_before># -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
<commit_msg>config: Fix loading of default config values
If RawConfigParser is not able to find config
using get(), it doesn't return None, instead
it raises an Exception.
Signed-off-by: Vivek Anand <6cbec6cb1b0c30c91d3fca6c61ddeb9b64cef11c@gmail.com><commit_after> | # -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
default = {'host': '127.0.0.1', 'port': 5000}
config = ConfigParser.RawConfigParser(default)
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host')
PORT = config.getint('autocloud', 'port')
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
| # -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
config: Fix loading of default config values
If RawConfigParser is not able to find config
using get(), it doesn't return None, instead
it raises an Exception.
Signed-off-by: Vivek Anand <6cbec6cb1b0c30c91d3fca6c61ddeb9b64cef11c@gmail.com># -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
default = {'host': '127.0.0.1', 'port': 5000}
config = ConfigParser.RawConfigParser(default)
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host')
PORT = config.getint('autocloud', 'port')
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
| <commit_before># -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
<commit_msg>config: Fix loading of default config values
If RawConfigParser is not able to find config
using get(), it doesn't return None, instead
it raises an Exception.
Signed-off-by: Vivek Anand <6cbec6cb1b0c30c91d3fca6c61ddeb9b64cef11c@gmail.com><commit_after># -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
default = {'host': '127.0.0.1', 'port': 5000}
config = ConfigParser.RawConfigParser(default)
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host')
PORT = config.getint('autocloud', 'port')
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
|
9c68a69eb5bf6e7ffab8b7538797c74b05a7c70b | src/zeit/content/article/edit/browser/tests/test_header.py | src/zeit/content/article/edit/browser/tests/test_header.py | import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| Test needs to wait until the header values are updated after it changed the template | FIX: Test needs to wait until the header values are updated after it changed the template
I'm not sure how this previously has ever passed, to be honest.
| Python | bsd-3-clause | ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article | import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
FIX: Test needs to wait until the header values are updated after it changed the template
I'm not sure how this previously has ever passed, to be honest. | import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| <commit_before>import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
<commit_msg>FIX: Test needs to wait until the header values are updated after it changed the template
I'm not sure how this previously has ever passed, to be honest.<commit_after> | import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
FIX: Test needs to wait until the header values are updated after it changed the template
I'm not sure how this previously has ever passed, to be honest.import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| <commit_before>import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
<commit_msg>FIX: Test needs to wait until the header values are updated after it changed the template
I'm not sure how this previously has ever passed, to be honest.<commit_after>import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
|
d6a67a94cacab93463f2a15fc5d2a2fadae2ad83 | site/tests/test_unittest.py | site/tests/test_unittest.py | import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
unittest.main(exit=False) | import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase)
unittest.TextTestRunner(verbosity=0).run(suite) | Change unittest test in test suite : it is not run in module __main__ | Change unittest test in test suite : it is not run in module __main__
| Python | bsd-3-clause | Hasimir/brython,olemis/brython,JohnDenker/brython,firmlyjin/brython,Isendir/brython,jonathanverner/brython,Mozhuowen/brython,olemis/brython,brython-dev/brython,Lh4cKg/brython,Isendir/brython,kevinmel2000/brython,amrdraz/brython,jonathanverner/brython,molebot/brython,Mozhuowen/brython,jonathanverner/brython,JohnDenker/brython,olemis/brython,jonathanverner/brython,Hasimir/brython,kikocorreoso/brython,brython-dev/brython,Hasimir/brython,firmlyjin/brython,kevinmel2000/brython,Lh4cKg/brython,molebot/brython,amrdraz/brython,rubyinhell/brython,kikocorreoso/brython,Mozhuowen/brython,Isendir/brython,firmlyjin/brython,kevinmel2000/brython,Mozhuowen/brython,firmlyjin/brython,JohnDenker/brython,olemis/brython,molebot/brython,amrdraz/brython,brython-dev/brython,Isendir/brython,JohnDenker/brython,rubyinhell/brython,kikocorreoso/brython,firmlyjin/brython,Hasimir/brython,kevinmel2000/brython,molebot/brython,Lh4cKg/brython,rubyinhell/brython,rubyinhell/brython,amrdraz/brython,Lh4cKg/brython | import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
unittest.main(exit=False)Change unittest test in test suite : it is not run in module __main__ | import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase)
unittest.TextTestRunner(verbosity=0).run(suite) | <commit_before>import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
unittest.main(exit=False)<commit_msg>Change unittest test in test suite : it is not run in module __main__<commit_after> | import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase)
unittest.TextTestRunner(verbosity=0).run(suite) | import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
unittest.main(exit=False)Change unittest test in test suite : it is not run in module __main__import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase)
unittest.TextTestRunner(verbosity=0).run(suite) | <commit_before>import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
unittest.main(exit=False)<commit_msg>Change unittest test in test suite : it is not run in module __main__<commit_after>import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase)
unittest.TextTestRunner(verbosity=0).run(suite) |
14cfc8927b36a89947c1bd4cefc5be88ebbea1b5 | cheroot/test/conftest.py | cheroot/test/conftest.py | import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
# Bind to an OS-assigned ephemeral port (0) rather than a fixed one so
# parallel or repeated test runs never collide on an in-use port.
config = {
    'bind_addr': ('127.0.0.1', 0),
    'wsgi_app': None,
}
def cheroot_server(server_factory):
    """Create a server via ``server_factory``, start it, and yield it.

    The server runs on a background thread.  The yield is wrapped in
    try/finally so ``stop()`` runs even if the consumer raises.
    """
    conf = config.copy()
    httpserver = server_factory(**conf)  # create it
    threading.Thread(target=httpserver.safe_start).start()  # spawn it
    while not httpserver.ready:  # wait until fully initialized and bound
        time.sleep(0.1)
    try:
        yield httpserver
    finally:
        httpserver.stop()  # destroy it, however the consumer exited
@pytest.fixture(scope='module')
def wsgi_server():
    """Module-scoped cheroot WSGI server, torn down after the module."""
    server_gen = cheroot_server(cheroot.wsgi.Server)
    yield next(server_gen)
    # Resume the generator so its teardown code runs.
    for _ in server_gen:
        pass
@pytest.fixture(scope='module')
def native_server():
    """Module-scoped native cheroot HTTP server, torn down after the module."""
    server_gen = cheroot_server(cheroot.server.HTTPServer)
    yield next(server_gen)
    # Resume the generator so its teardown code runs.
    for _ in server_gen:
        pass
| import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
# Port 0 asks the OS for any currently-free port, so test runs never
# collide on a fixed port number.
EPHEMERAL_PORT = 0

config = dict(
    bind_addr=('127.0.0.1', EPHEMERAL_PORT),
    wsgi_app=None,
)
def cheroot_server(server_factory):
    """Create a server via ``server_factory``, start it, and yield it.

    The server runs on a background thread.  The yield is wrapped in
    try/finally so ``stop()`` runs even if the consumer raises.
    """
    conf = config.copy()
    httpserver = server_factory(**conf)  # create it
    threading.Thread(target=httpserver.safe_start).start()  # spawn it
    while not httpserver.ready:  # wait until fully initialized and bound
        time.sleep(0.1)
    try:
        yield httpserver
    finally:
        httpserver.stop()  # destroy it, however the consumer exited
@pytest.fixture(scope='module')
def wsgi_server():
    """Module-scoped cheroot WSGI server, torn down after the module."""
    server_gen = cheroot_server(cheroot.wsgi.Server)
    yield next(server_gen)
    # Resume the generator so its teardown code runs.
    for _ in server_gen:
        pass
@pytest.fixture(scope='module')
def native_server():
    """Module-scoped native cheroot HTTP server, torn down after the module."""
    server_gen = cheroot_server(cheroot.server.HTTPServer)
    yield next(server_gen)
    # Resume the generator so its teardown code runs.
    for _ in server_gen:
        pass
| Make HTTP server fixture bind to an ephemeral port | Make HTTP server fixture bind to an ephemeral port
| Python | bsd-3-clause | cherrypy/cheroot | import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
# Bind to an OS-assigned ephemeral port (0) rather than a fixed one so
# parallel or repeated test runs never collide on an in-use port.
config = {
    'bind_addr': ('127.0.0.1', 0),
    'wsgi_app': None,
}
def cheroot_server(server_factory):
    """Create a server via ``server_factory``, start it, and yield it.

    The server runs on a background thread.  The yield is wrapped in
    try/finally so ``stop()`` runs even if the consumer raises.
    """
    conf = config.copy()
    httpserver = server_factory(**conf)  # create it
    threading.Thread(target=httpserver.safe_start).start()  # spawn it
    while not httpserver.ready:  # wait until fully initialized and bound
        time.sleep(0.1)
    try:
        yield httpserver
    finally:
        httpserver.stop()  # destroy it, however the consumer exited
@pytest.fixture(scope='module')
def wsgi_server():
    """Module-scoped cheroot WSGI server, torn down after the module."""
    server_gen = cheroot_server(cheroot.wsgi.Server)
    yield next(server_gen)
    # Resume the generator so its teardown code runs.
    for _ in server_gen:
        pass
@pytest.fixture(scope='module')
def native_server():
    """Module-scoped native cheroot HTTP server, torn down after the module."""
    server_gen = cheroot_server(cheroot.server.HTTPServer)
    yield next(server_gen)
    # Resume the generator so its teardown code runs.
    for _ in server_gen:
        pass
Make HTTP server fixture bind to an ephemeral port | import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
EPHEMERAL_PORT = 0
config = {
'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| <commit_before>import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
<commit_msg>Make HTTP server fixture bind to an ephemeral port<commit_after> | import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
EPHEMERAL_PORT = 0
config = {
'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
Make HTTP server fixture bind to an ephemeral portimport threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
EPHEMERAL_PORT = 0
config = {
'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| <commit_before>import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
    """Start a server built by *server_factory*, yield it, then stop it.

    The server runs on a daemon-style background thread; this generator
    busy-waits until the server reports itself ready (i.e. the socket is
    bound) before yielding, so consumers can connect immediately.
    """
    conf = config.copy()
    httpserver = server_factory(**conf)  # create it
    threading.Thread(target=httpserver.safe_start).start()  # spawn it
    while not httpserver.ready:  # wait until fully initialized and bound
        time.sleep(0.1)
    try:
        yield httpserver
    finally:
        # Without try/finally, closing this generator early (e.g. the
        # consuming fixture is torn down after a test error) raises
        # GeneratorExit at the yield and skips stop(), leaking a live
        # server thread. finally guarantees teardown on every path.
        httpserver.stop()  # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
    """Module-scoped cheroot WSGI server, torn down after the module."""
    yield from cheroot_server(cheroot.wsgi.Server)
@pytest.fixture(scope='module')
def native_server():
    """Module-scoped native cheroot HTTP server, torn down after the module."""
    yield from cheroot_server(cheroot.server.HTTPServer)
<commit_msg>Make HTTP server fixture bind to an ephemeral port<commit_after>import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
# Port 0 asks the OS to assign any free ephemeral port at bind time,
# so concurrent test runs never collide on a fixed port number.
EPHEMERAL_PORT = 0

# Base keyword arguments shared by both server fixtures below.
config = {
    'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
    'wsgi_app': None,
}
def cheroot_server(server_factory):
    """Start a server built by *server_factory*, yield it, then stop it.

    The server runs on a background thread; this generator busy-waits
    until the server reports itself ready (i.e. the socket is bound)
    before yielding, so consumers can connect immediately.
    """
    conf = config.copy()
    httpserver = server_factory(**conf)  # create it
    threading.Thread(target=httpserver.safe_start).start()  # spawn it
    while not httpserver.ready:  # wait until fully initialized and bound
        time.sleep(0.1)
    try:
        yield httpserver
    finally:
        # Without try/finally, closing this generator early (e.g. the
        # consuming fixture is torn down after a test error) raises
        # GeneratorExit at the yield and skips stop(), leaking a live
        # server thread. finally guarantees teardown on every path.
        httpserver.stop()  # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
    """Module-scoped cheroot WSGI server, torn down after the module."""
    yield from cheroot_server(cheroot.wsgi.Server)
@pytest.fixture(scope='module')
def native_server():
    """Module-scoped native cheroot HTTP server, torn down after the module."""
    yield from cheroot_server(cheroot.server.HTTPServer)
|
bd70ef56d95958b8f105bdff31b675d66c40bca8 | serfnode/handler/supervisor.py | serfnode/handler/supervisor.py | import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
| import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
| Add convenience function to start docker | Add convenience function to start docker
Mainly to be used from supervisor. | Python | mit | waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode | import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
Add convenience function to start docker
Mainly to be used from supervisor. | import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
| <commit_before>import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
<commit_msg>Add convenience function to start docker
Mainly to be used from supervisor.<commit_after> | import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
| import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
Add convenience function to start docker
Mainly to be used from supervisor.import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
| <commit_before>import os
import subprocess
import docker_utils
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
<commit_msg>Add convenience function to start docker
Mainly to be used from supervisor.<commit_after>import os
import subprocess
import docker_utils
import docker
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader('/programs'))
def supervisor_install(block, **kwargs):
"""Update supervisor with `block` config.
- `block` is the name to a .conf template file (in directory
`/programs`)
- `kwargs` are the key/values to use in the template
"""
conf_filename = '{}.conf'.format(kwargs['target'])
template = env.get_template(block)
kwargs.update({
'DOCKER': docker_utils.DOCKER,
'DOCKER_SOCKET': docker_utils.DOCKER_SOCKET,
'DOCKER_RUN': docker_utils.DOCKER_RUN})
conf = template.render(kwargs)
with open(os.path.join(
'/etc/supervisor/conf.d', conf_filename), 'w') as f:
f.write(conf)
def supervisor_exec(*args):
return subprocess.check_output(
['supervisorctl'] + list(args))
def supervisor_update():
supervisor_exec('reread')
supervisor_exec('update')
def start(block, **kwargs):
supervisor_install(block, **kwargs)
supervisor_update()
supervisor_exec('start', '{}:*'.format(kwargs['target']))
def start_docker(target, name, cmdline):
start('app.conf', target=target,
ARGS='--cidfile=/app --name={} {}'.format(name, cmdline),
NAME=name)
def stop(block):
supervisor_exec('stop', '{}:*'.format(block))
|
8cc225db1e36785914885cdb547b8feaf1d4e8fc | brainhack/datasets.py | brainhack/datasets.py | import os
from nilearn.datasets import _get_dataset, _fetch_dataset
from sklearn.datasets.base import Bunch
def fetch_craddock_2012_test(n_subjects=None, data_dir=None, resume=True,
verbose=0):
"""Download and load example data from Craddock 2012 work.
Parameters
----------
n_subjects: int, optional
The number of subjects to load. If None is given, all the
3 subjects are used.
data_dir: string, optional
Path of the data directory. Used to force data storage in a specified
location. Default: None
resume: boolean, optional
Indicate if dataset fetching can be resumed from previous attempt.
Returns
-------
data: sklearn.datasets.base.Bunch
Dictionary-like object, the interest attributes are :
- 'func': string list. Paths to functional images
- 'mask': string. Path to nifti mask file.
References
----------
`A whole brain fMRI atlas generated via spatially constrained spectral
clustering <http://www.ncbi.nlm.nih.gov/pubmed/21769991>`_
Craddock, R. C., James, G. A., Holtzheimer, P. E., Hu, X. P.
& Mayberg, H. S. , Human Brain Mapping, 2012, 33,
1914-1928 doi: 10.1002/hbm.21333.
Notes
-----
Cameron Craddock provides his code for this work:
https://github.com/ccraddock/cluster_roi
"""
# Dataset files
file_names = ['gm_maskfile.nii.gz', 'subject1.nii.gz', 'subject2.nii.gz',
'subject3.nii.gz']
file_names = [os.path.join('pyClusterROI', fn) for fn in file_names]
# load the dataset
try:
# Try to load the dataset
files = _get_dataset("craddock_2012_test", file_names,
data_dir=data_dir)
except IOError:
# If the dataset does not exists, we download it
url = 'ftp://www.nitrc.org/home/groups/cluster_roi/htdocs/pyClusterROI/pyClusterROI_testdata.1.0.tar.gz'
_fetch_dataset('craddock_2012_test', [url], data_dir=data_dir,
resume=resume, verbose=verbose)
files = _get_dataset('craddock_2012_test', file_names,
data_dir=data_dir)
# return the data
return Bunch(mask=files[0], func=files[1:n_subjects])
| Add fetching function for craddock 2012 experiment | Add fetching function for craddock 2012 experiment
| Python | bsd-3-clause | AlexandreAbraham/brainhack2013 | Add fetching function for craddock 2012 experiment | import os
from nilearn.datasets import _get_dataset, _fetch_dataset
from sklearn.datasets.base import Bunch
def fetch_craddock_2012_test(n_subjects=None, data_dir=None, resume=True,
verbose=0):
"""Download and load example data from Craddock 2012 work.
Parameters
----------
n_subjects: int, optional
The number of subjects to load. If None is given, all the
3 subjects are used.
data_dir: string, optional
Path of the data directory. Used to force data storage in a specified
location. Default: None
resume: boolean, optional
Indicate if dataset fetching can be resumed from previous attempt.
Returns
-------
data: sklearn.datasets.base.Bunch
Dictionary-like object, the interest attributes are :
- 'func': string list. Paths to functional images
- 'mask': string. Path to nifti mask file.
References
----------
`A whole brain fMRI atlas generated via spatially constrained spectral
clustering <http://www.ncbi.nlm.nih.gov/pubmed/21769991>`_
Craddock, R. C., James, G. A., Holtzheimer, P. E., Hu, X. P.
& Mayberg, H. S. , Human Brain Mapping, 2012, 33,
1914-1928 doi: 10.1002/hbm.21333.
Notes
-----
Cameron Craddock provides his code for this work:
https://github.com/ccraddock/cluster_roi
"""
# Dataset files
file_names = ['gm_maskfile.nii.gz', 'subject1.nii.gz', 'subject2.nii.gz',
'subject3.nii.gz']
file_names = [os.path.join('pyClusterROI', fn) for fn in file_names]
# load the dataset
try:
# Try to load the dataset
files = _get_dataset("craddock_2012_test", file_names,
data_dir=data_dir)
except IOError:
# If the dataset does not exists, we download it
url = 'ftp://www.nitrc.org/home/groups/cluster_roi/htdocs/pyClusterROI/pyClusterROI_testdata.1.0.tar.gz'
_fetch_dataset('craddock_2012_test', [url], data_dir=data_dir,
resume=resume, verbose=verbose)
files = _get_dataset('craddock_2012_test', file_names,
data_dir=data_dir)
# return the data
return Bunch(mask=files[0], func=files[1:n_subjects])
| <commit_before><commit_msg>Add fetching function for craddock 2012 experiment<commit_after> | import os
from nilearn.datasets import _get_dataset, _fetch_dataset
from sklearn.datasets.base import Bunch
def fetch_craddock_2012_test(n_subjects=None, data_dir=None, resume=True,
verbose=0):
"""Download and load example data from Craddock 2012 work.
Parameters
----------
n_subjects: int, optional
The number of subjects to load. If None is given, all the
3 subjects are used.
data_dir: string, optional
Path of the data directory. Used to force data storage in a specified
location. Default: None
resume: boolean, optional
Indicate if dataset fetching can be resumed from previous attempt.
Returns
-------
data: sklearn.datasets.base.Bunch
Dictionary-like object, the interest attributes are :
- 'func': string list. Paths to functional images
- 'mask': string. Path to nifti mask file.
References
----------
`A whole brain fMRI atlas generated via spatially constrained spectral
clustering <http://www.ncbi.nlm.nih.gov/pubmed/21769991>`_
Craddock, R. C., James, G. A., Holtzheimer, P. E., Hu, X. P.
& Mayberg, H. S. , Human Brain Mapping, 2012, 33,
1914-1928 doi: 10.1002/hbm.21333.
Notes
-----
Cameron Craddock provides his code for this work:
https://github.com/ccraddock/cluster_roi
"""
# Dataset files
file_names = ['gm_maskfile.nii.gz', 'subject1.nii.gz', 'subject2.nii.gz',
'subject3.nii.gz']
file_names = [os.path.join('pyClusterROI', fn) for fn in file_names]
# load the dataset
try:
# Try to load the dataset
files = _get_dataset("craddock_2012_test", file_names,
data_dir=data_dir)
except IOError:
# If the dataset does not exists, we download it
url = 'ftp://www.nitrc.org/home/groups/cluster_roi/htdocs/pyClusterROI/pyClusterROI_testdata.1.0.tar.gz'
_fetch_dataset('craddock_2012_test', [url], data_dir=data_dir,
resume=resume, verbose=verbose)
files = _get_dataset('craddock_2012_test', file_names,
data_dir=data_dir)
# return the data
return Bunch(mask=files[0], func=files[1:n_subjects])
| Add fetching function for craddock 2012 experimentimport os
from nilearn.datasets import _get_dataset, _fetch_dataset
from sklearn.datasets.base import Bunch
def fetch_craddock_2012_test(n_subjects=None, data_dir=None, resume=True,
verbose=0):
"""Download and load example data from Craddock 2012 work.
Parameters
----------
n_subjects: int, optional
The number of subjects to load. If None is given, all the
3 subjects are used.
data_dir: string, optional
Path of the data directory. Used to force data storage in a specified
location. Default: None
resume: boolean, optional
Indicate if dataset fetching can be resumed from previous attempt.
Returns
-------
data: sklearn.datasets.base.Bunch
Dictionary-like object, the interest attributes are :
- 'func': string list. Paths to functional images
- 'mask': string. Path to nifti mask file.
References
----------
`A whole brain fMRI atlas generated via spatially constrained spectral
clustering <http://www.ncbi.nlm.nih.gov/pubmed/21769991>`_
Craddock, R. C., James, G. A., Holtzheimer, P. E., Hu, X. P.
& Mayberg, H. S. , Human Brain Mapping, 2012, 33,
1914-1928 doi: 10.1002/hbm.21333.
Notes
-----
Cameron Craddock provides his code for this work:
https://github.com/ccraddock/cluster_roi
"""
# Dataset files
file_names = ['gm_maskfile.nii.gz', 'subject1.nii.gz', 'subject2.nii.gz',
'subject3.nii.gz']
file_names = [os.path.join('pyClusterROI', fn) for fn in file_names]
# load the dataset
try:
# Try to load the dataset
files = _get_dataset("craddock_2012_test", file_names,
data_dir=data_dir)
except IOError:
# If the dataset does not exists, we download it
url = 'ftp://www.nitrc.org/home/groups/cluster_roi/htdocs/pyClusterROI/pyClusterROI_testdata.1.0.tar.gz'
_fetch_dataset('craddock_2012_test', [url], data_dir=data_dir,
resume=resume, verbose=verbose)
files = _get_dataset('craddock_2012_test', file_names,
data_dir=data_dir)
# return the data
return Bunch(mask=files[0], func=files[1:n_subjects])
| <commit_before><commit_msg>Add fetching function for craddock 2012 experiment<commit_after>import os
from nilearn.datasets import _get_dataset, _fetch_dataset
from sklearn.datasets.base import Bunch
def fetch_craddock_2012_test(n_subjects=None, data_dir=None, resume=True,
verbose=0):
"""Download and load example data from Craddock 2012 work.
Parameters
----------
n_subjects: int, optional
The number of subjects to load. If None is given, all the
3 subjects are used.
data_dir: string, optional
Path of the data directory. Used to force data storage in a specified
location. Default: None
resume: boolean, optional
Indicate if dataset fetching can be resumed from previous attempt.
Returns
-------
data: sklearn.datasets.base.Bunch
Dictionary-like object, the interest attributes are :
- 'func': string list. Paths to functional images
- 'mask': string. Path to nifti mask file.
References
----------
`A whole brain fMRI atlas generated via spatially constrained spectral
clustering <http://www.ncbi.nlm.nih.gov/pubmed/21769991>`_
Craddock, R. C., James, G. A., Holtzheimer, P. E., Hu, X. P.
& Mayberg, H. S. , Human Brain Mapping, 2012, 33,
1914-1928 doi: 10.1002/hbm.21333.
Notes
-----
Cameron Craddock provides his code for this work:
https://github.com/ccraddock/cluster_roi
"""
# Dataset files
file_names = ['gm_maskfile.nii.gz', 'subject1.nii.gz', 'subject2.nii.gz',
'subject3.nii.gz']
file_names = [os.path.join('pyClusterROI', fn) for fn in file_names]
# load the dataset
try:
# Try to load the dataset
files = _get_dataset("craddock_2012_test", file_names,
data_dir=data_dir)
except IOError:
# If the dataset does not exists, we download it
url = 'ftp://www.nitrc.org/home/groups/cluster_roi/htdocs/pyClusterROI/pyClusterROI_testdata.1.0.tar.gz'
_fetch_dataset('craddock_2012_test', [url], data_dir=data_dir,
resume=resume, verbose=verbose)
files = _get_dataset('craddock_2012_test', file_names,
data_dir=data_dir)
# return the data
return Bunch(mask=files[0], func=files[1:n_subjects])
| |
973669fce5fcc2360b4c72b3d1345d708e1ca0aa | examples/bench_randomizer.py | examples/bench_randomizer.py | import random
import hurdles
class BenchRandom(hurdles.BenchCase):
def bench_this(self):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
| import random
import hurdles
from hurdles.tools import extra_setup
class BenchRandom(hurdles.BenchCase):
@extra_setup("""import random""")
def bench_this(self, *args, **kwargs):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self, *args, **kwargs):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
| Update : don't forget *args, **kwargs in bench_case methods | Update : don't forget *args, **kwargs in bench_case methods
| Python | mit | oleiade/Hurdles | import random
import hurdles
class BenchRandom(hurdles.BenchCase):
def bench_this(self):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
Update : don't forget *args, **kwargs in bench_case methods | import random
import hurdles
from hurdles.tools import extra_setup
class BenchRandom(hurdles.BenchCase):
@extra_setup("""import random""")
def bench_this(self, *args, **kwargs):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self, *args, **kwargs):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
| <commit_before>import random
import hurdles
class BenchRandom(hurdles.BenchCase):
def bench_this(self):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
<commit_msg>Update : don't forget *args, **kwargs in bench_case methods<commit_after> | import random
import hurdles
from hurdles.tools import extra_setup
class BenchRandom(hurdles.BenchCase):
@extra_setup("""import random""")
def bench_this(self, *args, **kwargs):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self, *args, **kwargs):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
| import random
import hurdles
class BenchRandom(hurdles.BenchCase):
def bench_this(self):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
Update : don't forget *args, **kwargs in bench_case methodsimport random
import hurdles
from hurdles.tools import extra_setup
class BenchRandom(hurdles.BenchCase):
@extra_setup("""import random""")
def bench_this(self, *args, **kwargs):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self, *args, **kwargs):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
| <commit_before>import random
import hurdles
class BenchRandom(hurdles.BenchCase):
def bench_this(self):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
<commit_msg>Update : don't forget *args, **kwargs in bench_case methods<commit_after>import random
import hurdles
from hurdles.tools import extra_setup
class BenchRandom(hurdles.BenchCase):
@extra_setup("""import random""")
def bench_this(self, *args, **kwargs):
return [random.randint(1, 100000) for x in [0] * 100000]
def bench_that(self, *args, **kwargs):
return [random.randint(1, 10000) for y in [0] * 10000]
if __name__ == "__main__":
B = BenchRandom()
B.run()
|
f7e01bc27d6ec8e4398b30128b986227c81cbad7 | src/foremast/pipeline/__main__.py | src/foremast/pipeline/__main__.py | """Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="../raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
| """Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="./raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
| Remove going back a directory for properties | fix: Remove going back a directory for properties
See also: PSOBAT-1197
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="../raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
fix: Remove going back a directory for properties
See also: PSOBAT-1197 | """Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="./raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
| <commit_before>"""Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="../raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
<commit_msg>fix: Remove going back a directory for properties
See also: PSOBAT-1197<commit_after> | """Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="./raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
| """Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="../raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
fix: Remove going back a directory for properties
See also: PSOBAT-1197"""Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="./raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
| <commit_before>"""Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="../raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
<commit_msg>fix: Remove going back a directory for properties
See also: PSOBAT-1197<commit_after>"""Create Spinnaker Pipeline."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_pipeline import SpinnakerPipeline
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument(
"--triggerjob",
help="The jenkins job to monitor for pipeline triggering",
required=True)
parser.add_argument(
"--properties",
help="Location of json file that contains application.json details",
default="./raw.properties.json",
required=False)
# parser.add_argument("--vpc",
# help="The vpc to create the security group",
# required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for
# processing
appinfo = {
'app': args.app,
# 'vpc': args.vpc,
'triggerjob': args.triggerjob,
'properties': args.properties
}
spinnakerapps = SpinnakerPipeline(app_info=appinfo)
spinnakerapps.create_pipeline()
if __name__ == "__main__":
main()
|
beb1e10582de1aa3a6d8af121053ed2bdba1b1cf | apps/accounts/myaccount_urls.py | apps/accounts/myaccount_urls.py | """
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
| """
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
# FIXME Move to root urlconf if possible (to avoid useless dependency)
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
| Add fixme for future revision | Add fixme for future revision
| Python | agpl-3.0 | TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker | """
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
Add fixme for future revision | """
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
# FIXME Move to root urlconf if possible (to avoid useless dependency)
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
| <commit_before>"""
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
<commit_msg>Add fixme for future revision<commit_after> | """
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
# FIXME Move to root urlconf if possible (to avoid useless dependency)
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
| """
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
Add fixme for future revision"""
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
# FIXME Move to root urlconf if possible (to avoid useless dependency)
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
| <commit_before>"""
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
<commit_msg>Add fixme for future revision<commit_after>"""
URLCONF for the user accounts app (part 2/2).
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
# User accounts URL patterns configuration
urlpatterns = (
# My account page
url(r'^$', views.my_account_show, name='index'),
# Password change
url(r'^modification-mot-de-passe/$', auth_views.password_change, {
'post_change_redirect': 'myaccount:password_change_done',
'template_name': 'accounts/password_change_form.html'
}, name='password_change'),
url(r'^modification-mot-de-passe/ok/$', auth_views.password_change_done, {
'template_name': 'accounts/password_change_done.html'
}, name='password_change_done'),
# Email change
# FIXME Move to root urlconf if possible (to avoid useless dependency)
url(r'^modification-adresse-email/', include('apps.changemail.urls')),
)
|
7fcaa873db11e9fd74f37251d04f246c384e3d94 | kbkdna/dna.py | kbkdna/dna.py | #!/usr/bin/env python2
def reverse(seq):
return seq[::-1]
def complement(seq):
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
return reverse(complement(seq))
def gc_content(seq):
# This function contains a bug. Do you see it?
return sum(x in 'GC' for x in seq) / len(seq)
| #!/usr/bin/env python2
from __future__ import division
def reverse(seq):
"""Return the reverse of the given sequence (i.e. 3' to
5')."""
return seq[::-1]
def complement(seq):
"""Return the complement of the given sequence (i.e. G=>C,
A=>T, etc.)"""
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
"""Return the reverse complement of the given sequence
(e.g. the opposite strand)."""
return reverse(complement(seq))
# This function contains a bug. Do you see it?
def gc_content(seq):
"""Return the GC content of the given sequence (e.g. the
fraction of nucleotides that are either G or C)."""
return sum(x in 'GC' for x in seq) / len(seq)
| Fix the gc_content bug to get a clean Travis report. | Fix the gc_content bug to get a clean Travis report.
| Python | mit | kalekundert/kbkdna | #!/usr/bin/env python2
def reverse(seq):
return seq[::-1]
def complement(seq):
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
return reverse(complement(seq))
def gc_content(seq):
# This function contains a bug. Do you see it?
return sum(x in 'GC' for x in seq) / len(seq)
Fix the gc_content bug to get a clean Travis report. | #!/usr/bin/env python2
from __future__ import division
def reverse(seq):
"""Return the reverse of the given sequence (i.e. 3' to
5')."""
return seq[::-1]
def complement(seq):
"""Return the complement of the given sequence (i.e. G=>C,
A=>T, etc.)"""
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
"""Return the reverse complement of the given sequence
(e.g. the opposite strand)."""
return reverse(complement(seq))
# This function contains a bug. Do you see it?
def gc_content(seq):
"""Return the GC content of the given sequence (e.g. the
fraction of nucleotides that are either G or C)."""
return sum(x in 'GC' for x in seq) / len(seq)
| <commit_before>#!/usr/bin/env python2
def reverse(seq):
return seq[::-1]
def complement(seq):
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
return reverse(complement(seq))
def gc_content(seq):
# This function contains a bug. Do you see it?
return sum(x in 'GC' for x in seq) / len(seq)
<commit_msg>Fix the gc_content bug to get a clean Travis report.<commit_after> | #!/usr/bin/env python2
from __future__ import division
def reverse(seq):
"""Return the reverse of the given sequence (i.e. 3' to
5')."""
return seq[::-1]
def complement(seq):
"""Return the complement of the given sequence (i.e. G=>C,
A=>T, etc.)"""
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
"""Return the reverse complement of the given sequence
(e.g. the opposite strand)."""
return reverse(complement(seq))
# This function contains a bug. Do you see it?
def gc_content(seq):
"""Return the GC content of the given sequence (e.g. the
fraction of nucleotides that are either G or C)."""
return sum(x in 'GC' for x in seq) / len(seq)
| #!/usr/bin/env python2
def reverse(seq):
return seq[::-1]
def complement(seq):
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
return reverse(complement(seq))
def gc_content(seq):
# This function contains a bug. Do you see it?
return sum(x in 'GC' for x in seq) / len(seq)
Fix the gc_content bug to get a clean Travis report.#!/usr/bin/env python2
from __future__ import division
def reverse(seq):
"""Return the reverse of the given sequence (i.e. 3' to
5')."""
return seq[::-1]
def complement(seq):
"""Return the complement of the given sequence (i.e. G=>C,
A=>T, etc.)"""
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
"""Return the reverse complement of the given sequence
(e.g. the opposite strand)."""
return reverse(complement(seq))
# This function contains a bug. Do you see it?
def gc_content(seq):
"""Return the GC content of the given sequence (e.g. the
fraction of nucleotides that are either G or C)."""
return sum(x in 'GC' for x in seq) / len(seq)
| <commit_before>#!/usr/bin/env python2
def reverse(seq):
return seq[::-1]
def complement(seq):
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
return reverse(complement(seq))
def gc_content(seq):
# This function contains a bug. Do you see it?
return sum(x in 'GC' for x in seq) / len(seq)
<commit_msg>Fix the gc_content bug to get a clean Travis report.<commit_after>#!/usr/bin/env python2
from __future__ import division
def reverse(seq):
"""Return the reverse of the given sequence (i.e. 3' to
5')."""
return seq[::-1]
def complement(seq):
"""Return the complement of the given sequence (i.e. G=>C,
A=>T, etc.)"""
from string import maketrans
complements = maketrans('ACTGactg', 'TGACtgac')
return seq.translate(complements)
def reverse_complement(seq):
"""Return the reverse complement of the given sequence
(e.g. the opposite strand)."""
return reverse(complement(seq))
# This function contains a bug. Do you see it?
def gc_content(seq):
"""Return the GC content of the given sequence (e.g. the
fraction of nucleotides that are either G or C)."""
return sum(x in 'GC' for x in seq) / len(seq)
|
1214870ea94d63a543593fe4f8fde2a78807d166 | django_sphinx_db/backend/models.py | django_sphinx_db/backend/models.py | from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
| from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
def using(self, alias):
# Ignore the alias. This will allow the Django router to decide
# what db receives the query. Otherwise, when dealing with related
# models, Django tries to force all queries to the same database.
# This is the right thing to do in cases of master/slave or sharding
# but with Sphinx, we want all related queries to flow to Sphinx,
# never another configured database.
return self._clone()
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
| Handle the situation where a SphinxModel is related to a non-sphinx model. | Handle the situation where a SphinxModel is related to a non-sphinx model.
| Python | bsd-3-clause | rutube/django-sphinx-db,jnormore/django-sphinx-db,anatoliy-larin/django-sphinx-db,smartfile/django-sphinx-db,petekalo/django-sphinx-db | from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
Handle the situation where a SphinxModel is related to a non-sphinx model. | from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
def using(self, alias):
# Ignore the alias. This will allow the Django router to decide
# what db receives the query. Otherwise, when dealing with related
# models, Django tries to force all queries to the same database.
# This is the right thing to do in cases of master/slave or sharding
# but with Sphinx, we want all related queries to flow to Sphinx,
# never another configured database.
return self._clone()
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
| <commit_before>from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
<commit_msg>Handle the situation where a SphinxModel is related to a non-sphinx model.<commit_after> | from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
def using(self, alias):
# Ignore the alias. This will allow the Django router to decide
# what db receives the query. Otherwise, when dealing with related
# models, Django tries to force all queries to the same database.
# This is the right thing to do in cases of master/slave or sharding
# but with Sphinx, we want all related queries to flow to Sphinx,
# never another configured database.
return self._clone()
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
| from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
Handle the situation where a SphinxModel is related to a non-sphinx model.from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
def using(self, alias):
# Ignore the alias. This will allow the Django router to decide
# what db receives the query. Otherwise, when dealing with related
# models, Django tries to force all queries to the same database.
# This is the right thing to do in cases of master/slave or sharding
# but with Sphinx, we want all related queries to flow to Sphinx,
# never another configured database.
return self._clone()
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
| <commit_before>from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
<commit_msg>Handle the situation where a SphinxModel is related to a non-sphinx model.<commit_after>from django.db import models
from django.db.models.sql import Query
from django.db.models.query import QuerySet
from django_sphinx_db.backend.sphinx.compiler import SphinxWhereNode
class SphinxQuery(Query):
compiler = 'SphinxQLCompiler'
def __init__(self, *args, **kwargs):
kwargs.setdefault('where', SphinxWhereNode)
super(SphinxQuery, self).__init__(*args, **kwargs)
class SphinxQuerySet(QuerySet):
def __init__(self, model, **kwargs):
kwargs.setdefault('query', SphinxQuery(model))
super(SphinxQuerySet, self).__init__(model, **kwargs)
def using(self, alias):
# Ignore the alias. This will allow the Django router to decide
# what db receives the query. Otherwise, when dealing with related
# models, Django tries to force all queries to the same database.
# This is the right thing to do in cases of master/slave or sharding
# but with Sphinx, we want all related queries to flow to Sphinx,
# never another configured database.
return self._clone()
class SphinxManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
# Determine which fields are sphinx fields (full-text data) and
# defer loading them. Sphinx won't return them.
# TODO: we probably need a way to keep these from being loaded
# later if the attr is accessed.
sphinx_fields = [field.name for field in self.model._meta.fields \
if isinstance(field, SphinxField)]
return SphinxQuerySet(self.model).defer(*sphinx_fields)
class SphinxField(models.TextField):
pass
class SphinxModel(models.Model):
class Meta:
abstract = True
objects = SphinxManager()
|
3844c3e77da57b001ca55a9ae8eb34a08313728a | sktracker/__init__.py | sktracker/__init__.py | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
| """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
utils
Utilities functions
"""
import logging
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
def setup_log(): # pragma: no cover
from .utils import color
from .utils import in_ipython
if in_ipython():
logformat = '%(asctime)s' + ':'
logformat += '%(levelname)s' + ':'
logformat += '%(name)s' + ':'
# logformat += '%(funcName)s' + ': '
logformat += ' %(message)s'
else:
logformat = color('%(asctime)s', 'BLUE') + ':'
logformat += color('%(levelname)s', 'RED') + ':'
logformat += color('%(name)s', 'YELLOW') + ':'
# logformat += color('%(funcName)s', 'GREEN') + ': '
logformat += color(' %(message)s', 'ENDC')
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
setup_log()
| Add setup_logging during sktracker init | Add setup_logging during sktracker init
| Python | bsd-3-clause | bnoi/scikit-tracker,bnoi/scikit-tracker,bnoi/scikit-tracker | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
Add setup_logging during sktracker init | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
utils
Utilities functions
"""
import logging
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
def setup_log(): # pragma: no cover
from .utils import color
from .utils import in_ipython
if in_ipython():
logformat = '%(asctime)s' + ':'
logformat += '%(levelname)s' + ':'
logformat += '%(name)s' + ':'
# logformat += '%(funcName)s' + ': '
logformat += ' %(message)s'
else:
logformat = color('%(asctime)s', 'BLUE') + ':'
logformat += color('%(levelname)s', 'RED') + ':'
logformat += color('%(name)s', 'YELLOW') + ':'
# logformat += color('%(funcName)s', 'GREEN') + ': '
logformat += color(' %(message)s', 'ENDC')
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
setup_log()
| <commit_before>"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
<commit_msg>Add setup_logging during sktracker init<commit_after> | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
utils
Utilities functions
"""
import logging
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
def setup_log(): # pragma: no cover
from .utils import color
from .utils import in_ipython
if in_ipython():
logformat = '%(asctime)s' + ':'
logformat += '%(levelname)s' + ':'
logformat += '%(name)s' + ':'
# logformat += '%(funcName)s' + ': '
logformat += ' %(message)s'
else:
logformat = color('%(asctime)s', 'BLUE') + ':'
logformat += color('%(levelname)s', 'RED') + ':'
logformat += color('%(name)s', 'YELLOW') + ':'
# logformat += color('%(funcName)s', 'GREEN') + ': '
logformat += color(' %(message)s', 'ENDC')
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
setup_log()
| """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
Add setup_logging during sktracker init"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
utils
Utilities functions
"""
import logging
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
def setup_log(): # pragma: no cover
from .utils import color
from .utils import in_ipython
if in_ipython():
logformat = '%(asctime)s' + ':'
logformat += '%(levelname)s' + ':'
logformat += '%(name)s' + ':'
# logformat += '%(funcName)s' + ': '
logformat += ' %(message)s'
else:
logformat = color('%(asctime)s', 'BLUE') + ':'
logformat += color('%(levelname)s', 'RED') + ':'
logformat += color('%(name)s', 'YELLOW') + ':'
# logformat += color('%(funcName)s', 'GREEN') + ': '
logformat += color(' %(message)s', 'ENDC')
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
setup_log()
| <commit_before>"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
<commit_msg>Add setup_logging during sktracker init<commit_after>"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
utils
Utilities functions
"""
import logging
try:
from .version import __version__
except ImportError: # pragma: no cover
__version__ = "dev" # pragma: no cover
from . import utils
def setup_log(): # pragma: no cover
from .utils import color
from .utils import in_ipython
if in_ipython():
logformat = '%(asctime)s' + ':'
logformat += '%(levelname)s' + ':'
logformat += '%(name)s' + ':'
# logformat += '%(funcName)s' + ': '
logformat += ' %(message)s'
else:
logformat = color('%(asctime)s', 'BLUE') + ':'
logformat += color('%(levelname)s', 'RED') + ':'
logformat += color('%(name)s', 'YELLOW') + ':'
# logformat += color('%(funcName)s', 'GREEN') + ': '
logformat += color(' %(message)s', 'ENDC')
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
setup_log()
|
c566236de3373aa73c271aaf412de60538c2abfb | common/renderers/excel_renderer.py | common/renderers/excel_renderer.py | import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'human.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
worksheet.write(row, col, data_dict[key])
col = col + 1
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
| import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'download.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
if not isinstance(data[key], list):
worksheet.write(row, col, data_dict[key])
col = col + 1
else:
_write_excel_file()
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
| Add support for nested lists in the excel renderer | Add support for nested lists in the excel renderer
| Python | mit | MasterFacilityList/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,urandu/mfl_api,urandu/mfl_api | import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'human.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
worksheet.write(row, col, data_dict[key])
col = col + 1
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
Add support for nested lists in the excel renderer | import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'download.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
if not isinstance(data[key], list):
worksheet.write(row, col, data_dict[key])
col = col + 1
else:
_write_excel_file()
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
| <commit_before>import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'human.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
worksheet.write(row, col, data_dict[key])
col = col + 1
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
<commit_msg>Add support for nested lists in the excel renderer<commit_after> | import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'download.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
if not isinstance(data[key], list):
worksheet.write(row, col, data_dict[key])
col = col + 1
else:
_write_excel_file()
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
| import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'human.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
worksheet.write(row, col, data_dict[key])
col = col + 1
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
Add support for nested lists in the excel rendererimport xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'download.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
if not isinstance(data[key], list):
worksheet.write(row, col, data_dict[key])
col = col + 1
else:
_write_excel_file()
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
| <commit_before>import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'human.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
worksheet.write(row, col, data_dict[key])
col = col + 1
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
<commit_msg>Add support for nested lists in the excel renderer<commit_after>import xlsxwriter
import os
from django.conf import settings
from rest_framework import renderers
def _write_excel_file(data):
result = data.get('results')
work_book_name = 'download.xlsx'
workbook = xlsxwriter.Workbook(work_book_name)
worksheet = workbook.add_worksheet()
row = 0
col = 0
data_dict = result[0]
data_keys = data_dict.keys()
for key in data_keys:
worksheet.write(row, col, key)
col = col + 1
row = 1
col = 0
for data_dict in result:
for key in data_keys:
if not isinstance(data[key], list):
worksheet.write(row, col, data_dict[key])
col = col + 1
else:
_write_excel_file()
row = row + 1
workbook.close()
return work_book_name
class ExcelRenderer(renderers.BaseRenderer):
media_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' # noqa
format = 'excel'
def render(self, data, accepted_media_type=None, renderer_context=None):
file_name = _write_excel_file(data)
file_path = os.path.join(settings.BASE_DIR, file_name)
with open(file_path, 'r') as excel_file:
file_data = excel_file.read()
return file_data
|
7e3b0ab5366756018e3bcaa50843e7d28ab7643c | codemood/common/views.py | codemood/common/views.py | from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
def get_template_names(self):
if self.request.user.is_authenticated():
return 'index/authorized.html'
else:
return 'index/not-authorized.html'
def get_context_data(self, **kwargs):
context = super(Index, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
| from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
"""
Return different view to authenticated and not.
"""
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_authenticated():
return AuthenticatedIndex.as_view()(self.request)
else:
return NotAuthenticatedIndex.as_view()(self.request)
class AuthenticatedIndex(TemplateView):
"""
View to authenticated user
"""
template_name = 'index/authorized.html'
def get_context_data(self, **kwargs):
context = super(AuthenticatedIndex, self).get_context_data(**kwargs)
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
class NotAuthenticatedIndex(TemplateView):
"""
View to NOT authenticated user
"""
template_name = 'index/not-authorized.html'
| Split Index view to AuthenticatedIndex view and NotAuthenticatedIndex view. | Split Index view to AuthenticatedIndex view and NotAuthenticatedIndex view.
| Python | mit | mindinpanic/codingmood,pavlenko-volodymyr/codingmood,mindinpanic/codingmood,pavlenko-volodymyr/codingmood | from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
def get_template_names(self):
if self.request.user.is_authenticated():
return 'index/authorized.html'
else:
return 'index/not-authorized.html'
def get_context_data(self, **kwargs):
context = super(Index, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
Split Index view to AuthenticatedIndex view and NotAuthenticatedIndex view. | from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
"""
Return different view to authenticated and not.
"""
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_authenticated():
return AuthenticatedIndex.as_view()(self.request)
else:
return NotAuthenticatedIndex.as_view()(self.request)
class AuthenticatedIndex(TemplateView):
"""
View to authenticated user
"""
template_name = 'index/authorized.html'
def get_context_data(self, **kwargs):
context = super(AuthenticatedIndex, self).get_context_data(**kwargs)
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
class NotAuthenticatedIndex(TemplateView):
"""
View to NOT authenticated user
"""
template_name = 'index/not-authorized.html'
| <commit_before>from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
def get_template_names(self):
if self.request.user.is_authenticated():
return 'index/authorized.html'
else:
return 'index/not-authorized.html'
def get_context_data(self, **kwargs):
context = super(Index, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
<commit_msg>Split Index view to AuthenticatedIndex view and NotAuthenticatedIndex view.<commit_after> | from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
"""
Return different view to authenticated and not.
"""
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_authenticated():
return AuthenticatedIndex.as_view()(self.request)
else:
return NotAuthenticatedIndex.as_view()(self.request)
class AuthenticatedIndex(TemplateView):
"""
View to authenticated user
"""
template_name = 'index/authorized.html'
def get_context_data(self, **kwargs):
context = super(AuthenticatedIndex, self).get_context_data(**kwargs)
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
class NotAuthenticatedIndex(TemplateView):
"""
View to NOT authenticated user
"""
template_name = 'index/not-authorized.html'
| from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
def get_template_names(self):
if self.request.user.is_authenticated():
return 'index/authorized.html'
else:
return 'index/not-authorized.html'
def get_context_data(self, **kwargs):
context = super(Index, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
Split Index view to AuthenticatedIndex view and NotAuthenticatedIndex view.from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
"""
Return different view to authenticated and not.
"""
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_authenticated():
return AuthenticatedIndex.as_view()(self.request)
else:
return NotAuthenticatedIndex.as_view()(self.request)
class AuthenticatedIndex(TemplateView):
    """Dashboard shown to authenticated users."""

    template_name = 'index/authorized.html'

    def get_context_data(self, **kwargs):
        """Populate the dashboard context: repository form plus activity feeds."""
        context = super(AuthenticatedIndex, self).get_context_data(**kwargs)
        if self.request.method == 'POST':
            # Bind the form to the submitted data: a Django Form's first
            # positional argument is `data`, not the HttpRequest itself.
            repository_form = RepositoryForm(self.request.POST)
        else:
            repository_form = RepositoryForm()
        context['repository_form'] = repository_form
        # TODO: filter commits/repositories by the current user as well.
        context['git_activity_list'] = Commit.objects.all()
        context['repositories'] = Repository.objects.all()
        context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
        return context
class NotAuthenticatedIndex(TemplateView):
    """Landing page rendered for anonymous (not logged in) visitors."""

    template_name = 'index/not-authorized.html'
| <commit_before>from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
def get_template_names(self):
if self.request.user.is_authenticated():
return 'index/authorized.html'
else:
return 'index/not-authorized.html'
def get_context_data(self, **kwargs):
context = super(Index, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
<commit_msg>Split Index view to AuthenticatedIndex view and NotAuthenticatedIndex view.<commit_after>from django.views.generic import TemplateView
from commits.forms import RepositoryForm
from commits.models import Repository, Commit
from social.models import Post
class Index(TemplateView):
"""
Return different view to authenticated and not.
"""
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_authenticated():
return AuthenticatedIndex.as_view()(self.request)
else:
return NotAuthenticatedIndex.as_view()(self.request)
class AuthenticatedIndex(TemplateView):
"""
View to authenticated user
"""
template_name = 'index/authorized.html'
def get_context_data(self, **kwargs):
context = super(AuthenticatedIndex, self).get_context_data(**kwargs)
if self.request.method == 'POST':
repository_form = RepositoryForm(self.request)
else:
repository_form = RepositoryForm()
context['repository_form'] = repository_form
#add filtering by user
context['git_activity_list'] = Commit.objects.all()
context['repositories'] = Repository.objects.all()
context['fb_activity_list'] = Post.objects.filter(user=self.request.user).order_by('created')
return context
class NotAuthenticatedIndex(TemplateView):
"""
View to NOT authenticated user
"""
template_name = 'index/not-authorized.html'
|
efc3aa4868eebf514b853a054cf382c6a9fb44a5 | server/middleware/AddToBU.py | server/middleware/AddToBU.py | from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
    """
    This middleware will add the current user to any BU's they've not already
    been explicitly added to.

    Active only when the ADD_TO_ALL_BUSINESS_UNITS setting exists and is
    truthy; Global Admin ('GA') users are never modified.
    """

    def process_view(self, request, view_func, view_args, view_kwargs):
        if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
            # `is_authenticated` is a property on Django >= 1.10; calling it
            # raises TypeError on Django 2.0+.
            if request.user.is_authenticated:
                if settings.ADD_TO_ALL_BUSINESS_UNITS \
                        and request.user.userprofile.level != 'GA':
                    for business_unit in BusinessUnit.objects.all():
                        if request.user not in business_unit.users.all():
                            business_unit.users.add(request.user)
                            business_unit.save()
        # Returning None lets Django continue normal view processing.
        return None
| from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
    """Ensure the requesting user belongs to every BusinessUnit.

    Each authenticated, non-Global-Admin user is added to any BusinessUnit
    that does not already list them.  Controlled by the
    ADD_TO_ALL_BUSINESS_UNITS setting.
    """

    def process_view(self, request, view_func, view_args, view_kwargs):
        # Guard clauses: bail out unless the feature applies to this request.
        if not hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
            return None
        if not request.user.is_authenticated:
            return None
        if not settings.ADD_TO_ALL_BUSINESS_UNITS:
            return None
        if request.user.userprofile.level == 'GA':
            return None
        for unit in BusinessUnit.objects.all():
            if request.user not in unit.users.all():
                unit.users.add(request.user)
                unit.save()
        return None
| Make `is_authenticated` a property access rather than a function call. | Make `is_authenticated` a property access rather than a function call.
This is a change in Django that was still functional for compatibility
reasons until recently, but ultimately should be an attribute.
| Python | apache-2.0 | sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,salopensource/sal | from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated():
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
Make `is_authenticated` a property access rather than a function call.
This is a change in Django that was still functional for compatibility
reasons until recently, but ultimately should be an attribute. | from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated:
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
| <commit_before>from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated():
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
<commit_msg>Make `is_authenticated` a property access rather than a function call.
This is a change in Django that was still functional for compatibility
reasons until recently, but ultimately should be an attribute.<commit_after> | from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated:
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
| from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated():
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
Make `is_authenticated` a property access rather than a function call.
This is a change in Django that was still functional for compatibility
reasons until recently, but ultimately should be an attribute.from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated:
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
| <commit_before>from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated():
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
<commit_msg>Make `is_authenticated` a property access rather than a function call.
This is a change in Django that was still functional for compatibility
reasons until recently, but ultimately should be an attribute.<commit_after>from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from server.models import *
class AddToBU(MiddlewareMixin):
"""
This middleware will add the current user to any BU's they've not already
been explicitly added to.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if hasattr(settings, 'ADD_TO_ALL_BUSINESS_UNITS'):
if request.user.is_authenticated:
if settings.ADD_TO_ALL_BUSINESS_UNITS \
and request.user.userprofile.level != 'GA':
for business_unit in BusinessUnit.objects.all():
if request.user not in business_unit.users.all():
business_unit.users.add(request.user)
business_unit.save()
return None
|
a260020f10b4d993635e579c8b130e754c49f7aa | dogebuild/dogefile_loader.py | dogebuild/dogefile_loader.py | from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
    """Compile and execute a doge file, returning the registered context."""
    with open(filename) as doge_file:
        code = compile(doge_file.read(), DOGE_FILE, 'exec')
    ContextHolder.create()
    exec(code)
    return ContextHolder.clear_and_get()
| import os
from pathlib import Path
from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
    """Execute a doge file and return the build context it registers.

    The file runs with the current working directory set to the doge
    file's own directory, so relative paths inside it resolve against the
    file's location.  The caller's working directory is always restored,
    even if executing the file raises.
    """
    doge_dir = Path(filename).resolve().parent
    with open(filename) as f:
        code = compile(f.read(), DOGE_FILE, 'exec')
    ContextHolder.create()
    saved_cwd = os.getcwd()
    os.chdir(doge_dir)
    try:
        exec(code)
    finally:
        # Restore the caller's cwd even when the doge file fails.
        os.chdir(saved_cwd)
    return ContextHolder.clear_and_get()
| Add directory switch on loading
| Python | mit | dogebuild/dogebuild | from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
exec(code)
return ContextHolder.clear_and_get()
Add cirectoy switch on loading | import os
from pathlib import Path
from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
dir = Path(filename).resolve().parent
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
cwd = os.getcwd()
os.chdir(dir)
exec(code)
os.chdir(cwd)
return ContextHolder.clear_and_get()
| <commit_before>from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
exec(code)
return ContextHolder.clear_and_get()
<commit_msg>Add cirectoy switch on loading<commit_after> | import os
from pathlib import Path
from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
dir = Path(filename).resolve().parent
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
cwd = os.getcwd()
os.chdir(dir)
exec(code)
os.chdir(cwd)
return ContextHolder.clear_and_get()
| from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
exec(code)
return ContextHolder.clear_and_get()
Add cirectoy switch on loadingimport os
from pathlib import Path
from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
dir = Path(filename).resolve().parent
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
cwd = os.getcwd()
os.chdir(dir)
exec(code)
os.chdir(cwd)
return ContextHolder.clear_and_get()
| <commit_before>from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
exec(code)
return ContextHolder.clear_and_get()
<commit_msg>Add cirectoy switch on loading<commit_after>import os
from pathlib import Path
from dogebuild.plugins import ContextHolder
from dogebuild.common import DOGE_FILE
def load_doge_file(filename):
dir = Path(filename).resolve().parent
with open(filename) as f:
code = compile(f.read(), DOGE_FILE, 'exec')
ContextHolder.create()
cwd = os.getcwd()
os.chdir(dir)
exec(code)
os.chdir(cwd)
return ContextHolder.clear_and_get()
|
06d2bb81d19ba3089bddeb77e7e85482b5f0596b | cms/djangoapps/contentstore/management/commands/export_all_courses.py | cms/djangoapps/contentstore/management/commands/export_all_courses.py | """
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
    """Export all courses from mongo to the specified data directory"""
    help = 'Export all courses from mongo to the specified data directory'

    def handle(self, *args, **options):
        """Execute the command.

        Expects a single positional argument: the output directory.
        Courses that fail to export are reported and skipped so one bad
        course cannot abort the whole run.
        """
        if len(args) != 1:
            raise CommandError("export requires one argument: <output path>")
        output_path = args[0]

        cs = contentstore()
        ms = modulestore('direct')
        root_dir = output_path

        courses = ms.get_courses()
        print("%d courses to export:" % len(courses))
        cids = [x.id for x in courses]
        print(cids)

        for course_id in cids:
            print("-" * 77)
            print("Exporting course id = {0} to {1}".format(course_id, output_path))
            try:
                # '/' cannot appear in a directory name, so encode it.
                course_dir = course_id.replace('/', '...')
                export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
            except Exception as err:
                # Report and continue with the remaining courses.
                print("=" * 30 + "> Oops, failed to export %s" % course_id)
                print("Error:")
                print(err)
| """
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
    """Export all courses from mongo to the specified data directory"""
    help = 'Export all courses from mongo to the specified data directory'

    def handle(self, *args, **options):
        """Execute the command.

        Expects a single positional argument: the output directory.
        Courses that fail to export are reported and skipped so one bad
        course cannot abort the whole run.
        """
        if len(args) != 1:
            raise CommandError("export requires one argument: <output path>")
        output_path = args[0]

        cs = contentstore()
        ms = modulestore('direct')
        root_dir = output_path

        courses = ms.get_courses()
        print("%d courses to export:" % len(courses))
        cids = [x.id for x in courses]
        print(cids)

        for course_id in cids:
            print("-" * 77)
            print("Exporting course id = {0} to {1}".format(course_id, output_path))
            try:
                # '/' cannot appear in a directory name, so encode it.
                course_dir = course_id.to_deprecated_string().replace('/', '...')
                export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
            except Exception as err:
                # Report and continue with the remaining courses.
                print("=" * 30 + "> Oops, failed to export %s" % course_id)
                print("Error:")
                print(err)
| Fix course id separator at export all courses command | Fix course id separator at export all courses command
| Python | agpl-3.0 | morenopc/edx-platform,morenopc/edx-platform,morenopc/edx-platform,morenopc/edx-platform,morenopc/edx-platform | """
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
Fix course id separator at export all courses command | """
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.to_deprecated_string().replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
| <commit_before>"""
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
<commit_msg>Fix course id separator at export all courses command<commit_after> | """
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.to_deprecated_string().replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
| """
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
Fix course id separator at export all courses command"""
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.to_deprecated_string().replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
| <commit_before>"""
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
<commit_msg>Fix course id separator at export all courses command<commit_after>"""
Script for exporting all courseware from Mongo to a directory
"""
from django.core.management.base import BaseCommand, CommandError
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
class Command(BaseCommand):
"""Export all courses from mongo to the specified data directory"""
help = 'Export all courses from mongo to the specified data directory'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 1:
raise CommandError("export requires one argument: <output path>")
output_path = args[0]
cs = contentstore()
ms = modulestore('direct')
root_dir = output_path
courses = ms.get_courses()
print("%d courses to export:" % len(courses))
cids = [x.id for x in courses]
print(cids)
for course_id in cids:
print("-"*77)
print("Exporting course id = {0} to {1}".format(course_id, output_path))
if 1:
try:
course_dir = course_id.to_deprecated_string().replace('/', '...')
export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore())
except Exception as err:
print("="*30 + "> Oops, failed to export %s" % course_id)
print("Error:")
print(err)
|
c4068d47da3b98f8fcc38bde6ab477174ab92a3f | djlint/analyzers/context.py | djlint/analyzers/context.py | class ContextPopException(Exception):
pass
class Context(object):
    """A stack of dicts with layered lookup (inspired by django.template.Context).

    Assignments and deletions act on the innermost layer; lookups search
    layers from innermost to outermost.
    """

    def __init__(self):
        self.dicts = [{}]

    def push(self):
        """Add and return a fresh innermost layer."""
        d = {}
        self.dicts.append(d)
        return d

    def pop(self):
        """Remove and return the innermost layer; the base layer cannot be popped."""
        if len(self.dicts) == 1:
            raise ContextPopException
        return self.dicts.pop()

    def __setitem__(self, key, value):
        self.dicts[-1][key] = value

    def __getitem__(self, key):
        for d in reversed(self.dicts):
            if key in d:
                return d[key]
        # Include the missing key so the error is diagnosable.
        raise KeyError(key)

    def __delitem__(self, key):
        del self.dicts[-1][key]

    def has_key(self, key):
        """Return True if any layer contains `key` (kept for compatibility)."""
        for d in self.dicts:
            if key in d:
                return True
        return False

    def __contains__(self, key):
        return self.has_key(key)

    def get(self, key, default=None):
        """Layered lookup returning `default` instead of raising KeyError."""
        try:
            return self[key]
        except KeyError:
            return default
| """Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
    """A stack container for imports and assignments."""

    def __init__(self):
        self.dicts = [{}]

    def push(self):
        fresh = {}
        self.dicts.append(fresh)
        return fresh

    def pop(self):
        if len(self.dicts) == 1:
            raise ContextPopException
        return self.dicts.pop()

    def __setitem__(self, key, value):
        # Writes always target the innermost layer.
        self.dicts[-1][key] = value

    def __getitem__(self, key):
        # Lookups search from the innermost layer outwards.
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        raise KeyError

    def __delitem__(self, key):
        del self.dicts[-1][key]

    def has_key(self, key):
        return any(key in layer for layer in self.dicts)

    def __contains__(self, key):
        return self.has_key(key)
| Remove Context.get method and add docstrings | Remove Context.get method and add docstrings
| Python | isc | alfredhq/djlint | class ContextPopException(Exception):
pass
class Context(object):
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
Remove Context.get method and add docstrings | """Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
| <commit_before>class ContextPopException(Exception):
pass
class Context(object):
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
<commit_msg>Remove Context.get method and add docstrings<commit_after> | """Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
| class ContextPopException(Exception):
pass
class Context(object):
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
Remove Context.get method and add docstrings"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
| <commit_before>class ContextPopException(Exception):
pass
class Context(object):
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
<commit_msg>Remove Context.get method and add docstrings<commit_after>"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
|
73614f076e93794dde784b6fc376ca85fbb5bc21 | FileWatcher.py | FileWatcher.py | from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False | import sys
# FSEvents observer in watchdog cannot have multiple watchers of the same path
# use kqueue instead
if sys.platform == 'darwin':
from watchdog.observers.kqueue import KqueueObserver as Observer
else:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False | Work around for watchdog problem on OS X. | Work around for watchdog problem on OS X.
| Python | apache-2.0 | rmcgurrin/PyQLab,calebjordan/PyQLab,Plourde-Research-Lab/PyQLab,BBN-Q/PyQLab | from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = FalseWork around for watchdog problem on OS X. | import sys
# FSEvents observer in watchdog cannot have multiple watchers of the same path
# use kqueue instead
if sys.platform == 'darwin':
from watchdog.observers.kqueue import KqueueObserver as Observer
else:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False | <commit_before>from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False<commit_msg>Work around for watchdog problem on OS X.<commit_after> | import sys
# FSEvents observer in watchdog cannot have multiple watchers of the same path
# use kqueue instead
if sys.platform == 'darwin':
from watchdog.observers.kqueue import KqueueObserver as Observer
else:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False | from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = FalseWork around for watchdog problem on OS X.import sys
# FSEvents observer in watchdog cannot have multiple watchers of the same path
# use kqueue instead
if sys.platform == 'darwin':
from watchdog.observers.kqueue import KqueueObserver as Observer
else:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False | <commit_before>from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False<commit_msg>Work around for watchdog problem on OS X.<commit_after>import sys
# FSEvents observer in watchdog cannot have multiple watchers of the same path
# use kqueue instead
if sys.platform == 'darwin':
from watchdog.observers.kqueue import KqueueObserver as Observer
else:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
import time
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
self.paused = True
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
if not self.paused:
"""
Hold off for half a second
If the event is from the file being opened to be written this gives
time for it to be written.
"""
time.sleep(0.5)
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.eventHandler.paused = True
def resume(self):
self.eventHandler.paused = False |
63fd487b8f00490c34e8dbcddcd6d7a9c070d457 | cs251tk/toolkit/process_student.py | cs251tk/toolkit/process_student.py | import os
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
| from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
| Remove leftover imports from testing | Remove leftover imports from testing | Python | mit | StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit | import os
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
Remove leftover imports from testing | from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
| <commit_before>import os
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
<commit_msg>Remove leftover imports from testing<commit_after> | from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
| import os
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
Remove leftover imports from testingfrom cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
| <commit_before>import os
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
<commit_msg>Remove leftover imports from testing<commit_after>from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
5fe4dab51c8b7c725b49bd6352fbf531003ead4e | openpnm/topotools/generators/__init__.py | openpnm/topotools/generators/__init__.py | from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
| r"""
================================================
Generators (:mod:`openpnm.topotools.generators`)
================================================
This module contains a selection of functions that deal specifically with
generating sufficient information that can be turned into an openpnm network.
.. currentmodule:: openpnm.topotools.generators
.. autosummary::
:template: mybase.rst
:toctree: generated/
:nosignatures:
cubic
delaunay
gabriel
voronoi
voronoi_delaunay_dual
cubic_template
fcc
bcc
"""
from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
| Add docstrings to generators' init file | Add docstrings to generators' init file
| Python | mit | PMEAL/OpenPNM | from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
Add docstrings to generators' init file | r"""
================================================
Generators (:mod:`openpnm.topotools.generators`)
================================================
This module contains a selection of functions that deal specifically with
generating sufficient information that can be turned into an openpnm network.
.. currentmodule:: openpnm.topotools.generators
.. autosummary::
:template: mybase.rst
:toctree: generated/
:nosignatures:
cubic
delaunay
gabriel
voronoi
voronoi_delaunay_dual
cubic_template
fcc
bcc
"""
from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
| <commit_before>from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
<commit_msg>Add docstrings to generators' init file<commit_after> | r"""
================================================
Generators (:mod:`openpnm.topotools.generators`)
================================================
This module contains a selection of functions that deal specifically with
generating sufficient information that can be turned into an openpnm network.
.. currentmodule:: openpnm.topotools.generators
.. autosummary::
:template: mybase.rst
:toctree: generated/
:nosignatures:
cubic
delaunay
gabriel
voronoi
voronoi_delaunay_dual
cubic_template
fcc
bcc
"""
from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
| from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
Add docstrings to generators' init filer"""
================================================
Generators (:mod:`openpnm.topotools.generators`)
================================================
This module contains a selection of functions that deal specifically with
generating sufficient information that can be turned into an openpnm network.
.. currentmodule:: openpnm.topotools.generators
.. autosummary::
:template: mybase.rst
:toctree: generated/
:nosignatures:
cubic
delaunay
gabriel
voronoi
voronoi_delaunay_dual
cubic_template
fcc
bcc
"""
from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
| <commit_before>from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
<commit_msg>Add docstrings to generators' init file<commit_after>r"""
================================================
Generators (:mod:`openpnm.topotools.generators`)
================================================
This module contains a selection of functions that deal specifically with
generating sufficient information that can be turned into an openpnm network.
.. currentmodule:: openpnm.topotools.generators
.. autosummary::
:template: mybase.rst
:toctree: generated/
:nosignatures:
cubic
delaunay
gabriel
voronoi
voronoi_delaunay_dual
cubic_template
fcc
bcc
"""
from .cubic import cubic
from .delaunay import delaunay
from .gabriel import gabriel
from .voronoi import voronoi
from .voronoi_delaunay_dual import voronoi_delaunay_dual
from .template import cubic_template
from .fcc import fcc
from .bcc import bcc
|
872e02dad33b42a804d0e28a500fd60947bc3ea2 | inferno/lib/notifications.py | inferno/lib/notifications.py | import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Failed: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
| import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
    """Email a job-status notification via SMTP.

    Parameters:
        job_id: identifier of the job (required).
        job_fail: status/failure detail; str()'d into the message body (required).
        mail_to: list of recipient addresses (required).
        mail_from: sender address; defaults to the Inferno daemon address.
        mail_server: SMTP host; defaults to "localhost".

    Returns:
        True when the message was handed off to the SMTP server, False when
        delivery failed for any reason (notification is best-effort).

    Raises:
        Exception: if job_id/job_fail or mail_to are missing/empty.
    """
    mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
    if not job_id or not job_fail:
        raise Exception("Empty job failure reason or job id: Cannot continue")
    if not mail_to:
        raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
    mail_server = "localhost" if not mail_server else mail_server
    msg = MIMEText(str(job_fail))
    msg['Subject'] = "Job Status: %s" % job_id
    msg['From'] = mail_from
    msg['To'] = ", ".join(mail_to)
    try:
        s = smtplib.SMTP(mail_server)
        try:
            s.sendmail(mail_from, mail_to, msg.as_string())
        finally:
            # Always close the connection, even when sendmail() raises;
            # the original only called quit() on success and leaked the socket.
            s.quit()
        return True
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit.
        # Delivery stays best-effort: report failure instead of raising.
        return False
| Clean up email notification message | Clean up email notification message
| Python | mit | chango/inferno,pombredanne/inferno,oldmantaiter/inferno | import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Failed: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
Clean up email notification message | import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Status: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
| <commit_before>import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Failed: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
<commit_msg>Clean up email notification message<commit_after> | import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Status: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
| import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Failed: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
Clean up email notification messageimport smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Status: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
| <commit_before>import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Failed: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
<commit_msg>Clean up email notification message<commit_after>import smtplib
from email.mime.text import MIMEText
def send_mail(job_id=None, job_fail=None, mail_to=None, mail_from=None, mail_server=None):
mail_from = "Inferno Daemon <inferno@localhost.localdomain>" if not mail_from else mail_from
if not job_id or not job_fail:
raise Exception("Empty job failure reason or job id: Cannot continue")
if not mail_to:
raise Exception("mail_to cannot be empty: Requires a list of recipient addresses")
mail_server = "localhost" if not mail_server else mail_server
msg_body = str(job_fail)
msg = MIMEText(msg_body)
msg['Subject'] = "Job Status: %s" % job_id
msg['From'] = mail_from
msg['To'] = ", ".join(mail_to)
try:
s = smtplib.SMTP(mail_server)
s.sendmail(mail_from, mail_to, msg.as_string())
s.quit()
return True
except:
return False
|
2ae4fb0dfa4c53e8dc80f3997cb3f9f8d9ad962a | src/ansible/models.py | src/ansible/models.py | from django.db import models
class Project(models.Model):
    """An Ansible project: where its playbooks/config live and the
    defaults used when running it."""
    project_name = models.CharField(max_length=200)
    # Filesystem locations of the playbooks and the ansible config.
    playbook_path = models.CharField(max_length=200, default="~/")
    ansible_config_path = models.CharField(max_length=200, default="~/")
    # Defaults applied when invoking ansible for this project.
    default_inventory = models.CharField(max_length=200, default="hosts")
    default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
    """Registry record owned by exactly one Project (shares its PK)."""

    class Meta:
        # Without this Django auto-pluralizes the model name as "registrys".
        verbose_name_plural = "registries"

    # One-to-one link doubling as the primary key; deleting the project
    # cascades to its registry.
    project = models.OneToOneField(
        Project,
        on_delete=models.CASCADE,
        primary_key=True,
    )
    name = models.CharField(max_length=200)

    def __str__(self):
        """Identify the registry by its owning project's name."""
        return "project name: %s" % self.project.project_name
| from django.db import models
class Project(models.Model):
    """An Ansible project: where its playbooks/config live and the
    defaults used when running it."""
    project_name = models.CharField(max_length=200)
    # Filesystem locations of the playbooks and the ansible config.
    playbook_path = models.CharField(max_length=200, default="~/")
    ansible_config_path = models.CharField(max_length=200, default="~/")
    # Defaults applied when invoking ansible for this project.
    default_inventory = models.CharField(max_length=200, default="hosts")
    default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
    """Registry record owned by exactly one Project (shares its PK)."""
    class Meta:
        # Without this Django auto-pluralizes the model name as "registrys".
        verbose_name_plural = "registries"
    # One-to-one link doubling as the primary key; deleting the project
    # cascades to its registry.
    project = models.OneToOneField(
        Project,
        on_delete=models.CASCADE,
        primary_key=True,
    )
    name = models.CharField(max_length=200)
    def __str__(self):
        """Identify the registry by its owning project's name."""
        return "project name: %s" % self.project.project_name
| Fix plural form of Registry | Fix plural form of Registry
TIL how to use meta class
| Python | bsd-3-clause | lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin | from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
Fix plural form of Registry
TIL how to use meta class | from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
class Meta:
verbose_name_plural = "registries"
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
| <commit_before>from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
<commit_msg>Fix plural form of Registry
TIL how to use meta class<commit_after> | from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
class Meta:
verbose_name_plural = "registries"
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
| from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
Fix plural form of Registry
TIL how to use meta classfrom django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
class Meta:
verbose_name_plural = "registries"
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
| <commit_before>from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
<commit_msg>Fix plural form of Registry
TIL how to use meta class<commit_after>from django.db import models
class Project(models.Model):
project_name = models.CharField(max_length=200)
playbook_path = models.CharField(max_length=200, default="~/")
ansible_config_path = models.CharField(max_length=200, default="~/")
default_inventory = models.CharField(max_length=200, default="hosts")
default_user = models.CharField(max_length=200, default="ubuntu")
class Registry(models.Model):
class Meta:
verbose_name_plural = "registries"
project = models.OneToOneField(
Project,
on_delete=models.CASCADE,
primary_key=True,
)
name = models.CharField(max_length=200)
def __str__(self):
return "project name: %s" % self.project.project_name
|
346d16c034450cc2cb4f26a5fcc71e721e1ac607 | api/setup.py | api/setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
from distutils.core import setup
setup(name='humbug',
      version=humbug.__version__,
      description='Bindings for the Humbug message API',
      author='Humbug, Inc.',
      author_email='humbug@humbughq.com',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Environment :: Web Environment',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Topic :: Communications :: Chat',
      ],
      url='https://humbughq.com/dist/api/',
      packages=['humbug'],
      # Install every directory found under integrations/ to
      # share/humbug/<relpath>, so new integrations are picked up
      # automatically instead of being hard-coded here one by one.
      data_files=[('share/humbug/examples',
                   ["examples/humbugrc", "examples/send-message"])] + \
                 [('share/humbug/' + relpath, glob.glob(relpath + '/*'))
                  for relpath in glob.glob('integrations/*')],
      scripts=["bin/humbug-send"],
      )
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
| Fix need to manually update list of integrations. | api: Fix need to manually update list of integrations.
(imported from commit 6842230f939483d32acb023ad38c53cb627df149)
| Python | apache-2.0 | yuvipanda/zulip,ryanbackman/zulip,Diptanshu8/zulip,LAndreas/zulip,joyhchen/zulip,vakila/zulip,DazWorrall/zulip,avastu/zulip,rht/zulip,PhilSk/zulip,udxxabp/zulip,brainwane/zulip,isht3/zulip,thomasboyt/zulip,bluesea/zulip,kou/zulip,zorojean/zulip,wavelets/zulip,timabbott/zulip,dawran6/zulip,Frouk/zulip,zachallaun/zulip,zacps/zulip,arpith/zulip,dnmfarrell/zulip,dawran6/zulip,ikasumiwt/zulip,PaulPetring/zulip,LeeRisk/zulip,zofuthan/zulip,ufosky-server/zulip,ipernet/zulip,adnanh/zulip,umkay/zulip,umkay/zulip,babbage/zulip,gkotian/zulip,jackrzhang/zulip,avastu/zulip,Jianchun1/zulip,KJin99/zulip,ApsOps/zulip,aps-sids/zulip,mohsenSy/zulip,ipernet/zulip,natanovia/zulip,punchagan/zulip,littledogboy/zulip,so0k/zulip,zulip/zulip,arpitpanwar/zulip,eastlhu/zulip,ryanbackman/zulip,Gabriel0402/zulip,isht3/zulip,jerryge/zulip,shaunstanislaus/zulip,kokoar/zulip,yuvipanda/zulip,bluesea/zulip,dattatreya303/zulip,zulip/zulip,TigorC/zulip,grave-w-grave/zulip,souravbadami/zulip,ikasumiwt/zulip,blaze225/zulip,hustlzp/zulip,cosmicAsymmetry/zulip,littledogboy/zulip,hj3938/zulip,bowlofstew/zulip,jerryge/zulip,wavelets/zulip,SmartPeople/zulip,dwrpayne/zulip,mdavid/zulip,bastianh/zulip,wavelets/zulip,KingxBanana/zulip,mdavid/zulip,zwily/zulip,swinghu/zulip,avastu/zulip,yocome/zulip,Batterfii/zulip,jainayush975/zulip,stamhe/zulip,MariaFaBella85/zulip,gigawhitlocks/zulip,vabs22/zulip,levixie/zulip,aakash-cr7/zulip,jphilipsen05/zulip,Juanvulcano/zulip,lfranchi/zulip,praveenaki/zulip,avastu/zulip,mahim97/zulip,Vallher/zulip,ahmadassaf/zulip,cosmicAsymmetry/zulip,tommyip/zulip,saitodisse/zulip,zulip/zulip,atomic-labs/zulip,vaidap/zulip,grave-w-grave/zulip,nicholasbs/zulip,codeKonami/zulip,praveenaki/zulip,amallia/zulip,pradiptad/zulip,tbutter/zulip,huangkebo/zulip,rht/zulip,tbutter/zulip,TigorC/zulip,kou/zulip,ashwinirudrappa/zulip,paxapy/zulip,MariaFaBella85/zulip,rht/zulip,dotcool/zulip,deer-hope/zulip,sharmaeklavya2/zulip,joyhchen/zulip,karamcnair/zulip,paxapy/zulip,grave-
w-grave/zulip,showell/zulip,mansilladev/zulip,hackerkid/zulip,krtkmj/zulip,arpith/zulip,wavelets/zulip,DazWorrall/zulip,timabbott/zulip,shubhamdhama/zulip,MariaFaBella85/zulip,deer-hope/zulip,bowlofstew/zulip,jeffcao/zulip,isht3/zulip,niftynei/zulip,jerryge/zulip,esander91/zulip,synicalsyntax/zulip,zhaoweigg/zulip,easyfmxu/zulip,huangkebo/zulip,technicalpickles/zulip,umkay/zulip,shubhamdhama/zulip,dattatreya303/zulip,susansls/zulip,esander91/zulip,developerfm/zulip,ryanbackman/zulip,hustlzp/zulip,johnny9/zulip,dnmfarrell/zulip,peiwei/zulip,amyliu345/zulip,m1ssou/zulip,bowlofstew/zulip,jphilipsen05/zulip,isht3/zulip,ufosky-server/zulip,Juanvulcano/zulip,jerryge/zulip,babbage/zulip,dnmfarrell/zulip,synicalsyntax/zulip,Batterfii/zulip,zachallaun/zulip,themass/zulip,amyliu345/zulip,JanzTam/zulip,niftynei/zulip,swinghu/zulip,shaunstanislaus/zulip,zwily/zulip,EasonYi/zulip,showell/zulip,seapasulli/zulip,zwily/zulip,hayderimran7/zulip,Qgap/zulip,zofuthan/zulip,alliejones/zulip,he15his/zulip,armooo/zulip,moria/zulip,gigawhitlocks/zulip,saitodisse/zulip,jackrzhang/zulip,arpitpanwar/zulip,tdr130/zulip,christi3k/zulip,tiansiyuan/zulip,jessedhillon/zulip,themass/zulip,Batterfii/zulip,isht3/zulip,babbage/zulip,sup95/zulip,mohsenSy/zulip,amanharitsh123/zulip,esander91/zulip,firstblade/zulip,eeshangarg/zulip,littledogboy/zulip,peguin40/zulip,swinghu/zulip,dawran6/zulip,johnnygaddarr/zulip,zacps/zulip,christi3k/zulip,wangdeshui/zulip,atomic-labs/zulip,zhaoweigg/zulip,arpith/zulip,vabs22/zulip,dattatreya303/zulip,jeffcao/zulip,shaunstanislaus/zulip,samatdav/zulip,jerryge/zulip,kaiyuanheshang/zulip,yocome/zulip,Juanvulcano/zulip,ApsOps/zulip,aps-sids/zulip,jonesgithub/zulip,babbage/zulip,KingxBanana/zulip,dotcool/zulip,ryansnowboarder/zulip,brainwane/zulip,peguin40/zulip,hustlzp/zulip,jainayush975/zulip,zacps/zulip,Suninus/zulip,souravbadami/zulip,gigawhitlocks/zulip,andersk/zulip,karamcnair/zulip,bowlofstew/zulip,vikas-parashar/zulip,tbutter/zulip,xuxiao/zulip,shrikrishnaholla/zulip
,bssrdf/zulip,saitodisse/zulip,stamhe/zulip,atomic-labs/zulip,peguin40/zulip,esander91/zulip,verma-varsha/zulip,praveenaki/zulip,levixie/zulip,schatt/zulip,stamhe/zulip,vabs22/zulip,glovebx/zulip,xuanhan863/zulip,j831/zulip,mdavid/zulip,fw1121/zulip,jessedhillon/zulip,Jianchun1/zulip,jessedhillon/zulip,alliejones/zulip,pradiptad/zulip,itnihao/zulip,rht/zulip,tommyip/zulip,aakash-cr7/zulip,alliejones/zulip,andersk/zulip,m1ssou/zulip,RobotCaleb/zulip,Diptanshu8/zulip,cosmicAsymmetry/zulip,stamhe/zulip,zwily/zulip,reyha/zulip,jainayush975/zulip,easyfmxu/zulip,pradiptad/zulip,kaiyuanheshang/zulip,m1ssou/zulip,aliceriot/zulip,alliejones/zulip,reyha/zulip,dnmfarrell/zulip,sharmaeklavya2/zulip,verma-varsha/zulip,fw1121/zulip,ufosky-server/zulip,vikas-parashar/zulip,Diptanshu8/zulip,moria/zulip,hayderimran7/zulip,codeKonami/zulip,MayB/zulip,noroot/zulip,Diptanshu8/zulip,zachallaun/zulip,zacps/zulip,blaze225/zulip,wdaher/zulip,souravbadami/zulip,vaidap/zulip,Gabriel0402/zulip,wangdeshui/zulip,tdr130/zulip,kaiyuanheshang/zulip,mohsenSy/zulip,aps-sids/zulip,udxxabp/zulip,vikas-parashar/zulip,bowlofstew/zulip,Vallher/zulip,tdr130/zulip,Galexrt/zulip,luyifan/zulip,moria/zulip,LeeRisk/zulip,rht/zulip,voidException/zulip,littledogboy/zulip,jessedhillon/zulip,schatt/zulip,dotcool/zulip,amyliu345/zulip,tiansiyuan/zulip,LAndreas/zulip,praveenaki/zulip,KingxBanana/zulip,shubhamdhama/zulip,seapasulli/zulip,EasonYi/zulip,sup95/zulip,wweiradio/zulip,proliming/zulip,zofuthan/zulip,vakila/zulip,souravbadami/zulip,ericzhou2008/zulip,mahim97/zulip,punchagan/zulip,tommyip/zulip,ApsOps/zulip,Suninus/zulip,Vallher/zulip,zorojean/zulip,joyhchen/zulip,reyha/zulip,Cheppers/zulip,Vallher/zulip,m1ssou/zulip,tbutter/zulip,reyha/zulip,krtkmj/zulip,SmartPeople/zulip,proliming/zulip,zorojean/zulip,jrowan/zulip,wavelets/zulip,RobotCaleb/zulip,TigorC/zulip,bowlofstew/zulip,rht/zulip,jonesgithub/zulip,brainwane/zulip,jackrzhang/zulip,zwily/zulip,Drooids/zulip,Drooids/zulip,hj3938/zulip,samatdav/zulip,sonal
i0901/zulip,ryansnowboarder/zulip,shubhamdhama/zulip,easyfmxu/zulip,jonesgithub/zulip,akuseru/zulip,willingc/zulip,gkotian/zulip,so0k/zulip,proliming/zulip,dwrpayne/zulip,sharmaeklavya2/zulip,shrikrishnaholla/zulip,schatt/zulip,johnnygaddarr/zulip,tommyip/zulip,aakash-cr7/zulip,zachallaun/zulip,voidException/zulip,umkay/zulip,moria/zulip,arpitpanwar/zulip,jeffcao/zulip,pradiptad/zulip,levixie/zulip,xuanhan863/zulip,guiquanz/zulip,karamcnair/zulip,dhcrzf/zulip,lfranchi/zulip,ahmadassaf/zulip,isht3/zulip,bluesea/zulip,littledogboy/zulip,EasonYi/zulip,xuanhan863/zulip,voidException/zulip,Galexrt/zulip,bitemyapp/zulip,Cheppers/zulip,vabs22/zulip,hustlzp/zulip,ApsOps/zulip,johnnygaddarr/zulip,gigawhitlocks/zulip,luyifan/zulip,mansilladev/zulip,johnnygaddarr/zulip,Drooids/zulip,bowlofstew/zulip,gkotian/zulip,adnanh/zulip,blaze225/zulip,swinghu/zulip,easyfmxu/zulip,itnihao/zulip,bssrdf/zulip,esander91/zulip,xuanhan863/zulip,PhilSk/zulip,aps-sids/zulip,amanharitsh123/zulip,kou/zulip,vakila/zulip,LeeRisk/zulip,arpitpanwar/zulip,Juanvulcano/zulip,ApsOps/zulip,amyliu345/zulip,PaulPetring/zulip,kokoar/zulip,eeshangarg/zulip,dxq-git/zulip,ericzhou2008/zulip,wweiradio/zulip,Gabriel0402/zulip,bssrdf/zulip,swinghu/zulip,peiwei/zulip,joshisa/zulip,seapasulli/zulip,Suninus/zulip,akuseru/zulip,arpitpanwar/zulip,thomasboyt/zulip,mohsenSy/zulip,JanzTam/zulip,shrikrishnaholla/zulip,timabbott/zulip,glovebx/zulip,TigorC/zulip,grave-w-grave/zulip,JPJPJPOPOP/zulip,PaulPetring/zulip,arpith/zulip,shaunstanislaus/zulip,qq1012803704/zulip,synicalsyntax/zulip,natanovia/zulip,johnny9/zulip,hayderimran7/zulip,jackrzhang/zulip,christi3k/zulip,zulip/zulip,qq1012803704/zulip,paxapy/zulip,AZtheAsian/zulip,technicalpickles/zulip,developerfm/zulip,christi3k/zulip,esander91/zulip,praveenaki/zulip,AZtheAsian/zulip,developerfm/zulip,jphilipsen05/zulip,akuseru/zulip,DazWorrall/zulip,Suninus/zulip,dwrpayne/zulip,joyhchen/zulip,rishig/zulip,ahmadassaf/zulip,luyifan/zulip,Batterfii/zulip,susansls/zulip,hayderim
ran7/zulip,bssrdf/zulip,qq1012803704/zulip,xuxiao/zulip,RobotCaleb/zulip,ahmadassaf/zulip,ryansnowboarder/zulip,Jianchun1/zulip,armooo/zulip,shaunstanislaus/zulip,calvinleenyc/zulip,ikasumiwt/zulip,timabbott/zulip,glovebx/zulip,joshisa/zulip,he15his/zulip,showell/zulip,Suninus/zulip,krtkmj/zulip,AZtheAsian/zulip,levixie/zulip,arpith/zulip,levixie/zulip,ryansnowboarder/zulip,DazWorrall/zulip,SmartPeople/zulip,moria/zulip,LeeRisk/zulip,karamcnair/zulip,jimmy54/zulip,christi3k/zulip,AZtheAsian/zulip,arpitpanwar/zulip,johnny9/zulip,grave-w-grave/zulip,itnihao/zulip,Batterfii/zulip,niftynei/zulip,Galexrt/zulip,peguin40/zulip,technicalpickles/zulip,hafeez3000/zulip,punchagan/zulip,technicalpickles/zulip,guiquanz/zulip,ryanbackman/zulip,amallia/zulip,zhaoweigg/zulip,jimmy54/zulip,voidException/zulip,shaunstanislaus/zulip,joshisa/zulip,jonesgithub/zulip,karamcnair/zulip,suxinde2009/zulip,wdaher/zulip,seapasulli/zulip,timabbott/zulip,verma-varsha/zulip,deer-hope/zulip,hengqujushi/zulip,brockwhittaker/zulip,peguin40/zulip,mdavid/zulip,tiansiyuan/zulip,codeKonami/zulip,yuvipanda/zulip,Cheppers/zulip,zhaoweigg/zulip,wweiradio/zulip,bastianh/zulip,suxinde2009/zulip,brainwane/zulip,ryansnowboarder/zulip,wdaher/zulip,jimmy54/zulip,JanzTam/zulip,jessedhillon/zulip,susansls/zulip,Qgap/zulip,vikas-parashar/zulip,j831/zulip,jainayush975/zulip,wweiradio/zulip,ryanbackman/zulip,kaiyuanheshang/zulip,KJin99/zulip,Galexrt/zulip,hackerkid/zulip,MayB/zulip,jimmy54/zulip,bitemyapp/zulip,KingxBanana/zulip,yuvipanda/zulip,bastianh/zulip,dhcrzf/zulip,dwrpayne/zulip,Jianchun1/zulip,umkay/zulip,developerfm/zulip,zachallaun/zulip,johnny9/zulip,zhaoweigg/zulip,vakila/zulip,susansls/zulip,niftynei/zulip,DazWorrall/zulip,jackrzhang/zulip,bitemyapp/zulip,jainayush975/zulip,bssrdf/zulip,dxq-git/zulip,willingc/zulip,huangkebo/zulip,Juanvulcano/zulip,wweiradio/zulip,yocome/zulip,adnanh/zulip,eastlhu/zulip,themass/zulip,AZtheAsian/zulip,kokoar/zulip,gkotian/zulip,xuxiao/zulip,EasonYi/zulip,brockwhittaker/z
ulip,avastu/zulip,jrowan/zulip,j831/zulip,xuxiao/zulip,jeffcao/zulip,huangkebo/zulip,MariaFaBella85/zulip,glovebx/zulip,kaiyuanheshang/zulip,schatt/zulip,ericzhou2008/zulip,ryansnowboarder/zulip,suxinde2009/zulip,kaiyuanheshang/zulip,hafeez3000/zulip,jonesgithub/zulip,hustlzp/zulip,udxxabp/zulip,gigawhitlocks/zulip,hackerkid/zulip,kokoar/zulip,ipernet/zulip,Batterfii/zulip,zwily/zulip,bastianh/zulip,jeffcao/zulip,guiquanz/zulip,hj3938/zulip,nicholasbs/zulip,sharmaeklavya2/zulip,mansilladev/zulip,bitemyapp/zulip,willingc/zulip,natanovia/zulip,cosmicAsymmetry/zulip,xuanhan863/zulip,hackerkid/zulip,kou/zulip,hj3938/zulip,JanzTam/zulip,eastlhu/zulip,ufosky-server/zulip,bssrdf/zulip,ApsOps/zulip,sup95/zulip,hj3938/zulip,peiwei/zulip,guiquanz/zulip,hj3938/zulip,mdavid/zulip,themass/zulip,amyliu345/zulip,zacps/zulip,saitodisse/zulip,mansilladev/zulip,hustlzp/zulip,eeshangarg/zulip,eastlhu/zulip,fw1121/zulip,Cheppers/zulip,amanharitsh123/zulip,jrowan/zulip,shubhamdhama/zulip,bluesea/zulip,noroot/zulip,xuanhan863/zulip,KJin99/zulip,vikas-parashar/zulip,tommyip/zulip,huangkebo/zulip,synicalsyntax/zulip,hayderimran7/zulip,firstblade/zulip,shubhamdhama/zulip,technicalpickles/zulip,developerfm/zulip,avastu/zulip,armooo/zulip,brockwhittaker/zulip,j831/zulip,tommyip/zulip,hengqujushi/zulip,paxapy/zulip,MayB/zulip,mahim97/zulip,dhcrzf/zulip,vaidap/zulip,umkay/zulip,mdavid/zulip,dhcrzf/zulip,amanharitsh123/zulip,Frouk/zulip,seapasulli/zulip,natanovia/zulip,karamcnair/zulip,aps-sids/zulip,ryanbackman/zulip,MayB/zulip,verma-varsha/zulip,easyfmxu/zulip,dxq-git/zulip,sup95/zulip,themass/zulip,esander91/zulip,johnny9/zulip,ericzhou2008/zulip,andersk/zulip,xuxiao/zulip,mansilladev/zulip,guiquanz/zulip,littledogboy/zulip,zhaoweigg/zulip,zwily/zulip,itnihao/zulip,he15his/zulip,sharmaeklavya2/zulip,shrikrishnaholla/zulip,wangdeshui/zulip,itnihao/zulip,guiquanz/zulip,noroot/zulip,qq1012803704/zulip,atomic-labs/zulip,natanovia/zulip,swinghu/zulip,tbutter/zulip,fw1121/zulip,aps-sids/zulip,Maria
FaBella85/zulip,zofuthan/zulip,rishig/zulip,dhcrzf/zulip,dwrpayne/zulip,deer-hope/zulip,suxinde2009/zulip,babbage/zulip,RobotCaleb/zulip,PaulPetring/zulip,PaulPetring/zulip,zulip/zulip,Cheppers/zulip,gkotian/zulip,mahim97/zulip,praveenaki/zulip,zhaoweigg/zulip,jerryge/zulip,Gabriel0402/zulip,JanzTam/zulip,johnnygaddarr/zulip,sup95/zulip,nicholasbs/zulip,Drooids/zulip,dawran6/zulip,MariaFaBella85/zulip,vakila/zulip,moria/zulip,joshisa/zulip,fw1121/zulip,shrikrishnaholla/zulip,ericzhou2008/zulip,shaunstanislaus/zulip,alliejones/zulip,mohsenSy/zulip,thomasboyt/zulip,Qgap/zulip,deer-hope/zulip,tdr130/zulip,wavelets/zulip,paxapy/zulip,ashwinirudrappa/zulip,he15his/zulip,xuxiao/zulip,nicholasbs/zulip,rht/zulip,brainwane/zulip,ikasumiwt/zulip,KJin99/zulip,calvinleenyc/zulip,voidException/zulip,johnnygaddarr/zulip,tiansiyuan/zulip,suxinde2009/zulip,jrowan/zulip,he15his/zulip,Qgap/zulip,dnmfarrell/zulip,praveenaki/zulip,shrikrishnaholla/zulip,noroot/zulip,KJin99/zulip,JPJPJPOPOP/zulip,kokoar/zulip,armooo/zulip,guiquanz/zulip,ashwinirudrappa/zulip,ufosky-server/zulip,willingc/zulip,LeeRisk/zulip,schatt/zulip,hafeez3000/zulip,calvinleenyc/zulip,zorojean/zulip,brockwhittaker/zulip,hafeez3000/zulip,hayderimran7/zulip,proliming/zulip,Diptanshu8/zulip,ericzhou2008/zulip,jphilipsen05/zulip,Drooids/zulip,blaze225/zulip,kou/zulip,dxq-git/zulip,krtkmj/zulip,amyliu345/zulip,aakash-cr7/zulip,arpith/zulip,luyifan/zulip,ipernet/zulip,thomasboyt/zulip,natanovia/zulip,thomasboyt/zulip,so0k/zulip,hackerkid/zulip,ufosky-server/zulip,grave-w-grave/zulip,KJin99/zulip,aps-sids/zulip,huangkebo/zulip,showell/zulip,dawran6/zulip,wdaher/zulip,Qgap/zulip,dotcool/zulip,proliming/zulip,Frouk/zulip,zofuthan/zulip,bitemyapp/zulip,adnanh/zulip,kaiyuanheshang/zulip,dotcool/zulip,firstblade/zulip,zachallaun/zulip,shubhamdhama/zulip,PaulPetring/zulip,ashwinirudrappa/zulip,sonali0901/zulip,seapasulli/zulip,Jianchun1/zulip,seapasulli/zulip,PhilSk/zulip,glovebx/zulip,stamhe/zulip,tdr130/zulip,proliming/zulip,ni
cholasbs/zulip,niftynei/zulip,Cheppers/zulip,yuvipanda/zulip,mansilladev/zulip,suxinde2009/zulip,Batterfii/zulip,synicalsyntax/zulip,jonesgithub/zulip,suxinde2009/zulip,eastlhu/zulip,PhilSk/zulip,vaidap/zulip,thomasboyt/zulip,mohsenSy/zulip,verma-varsha/zulip,amallia/zulip,hengqujushi/zulip,andersk/zulip,vikas-parashar/zulip,aakash-cr7/zulip,udxxabp/zulip,yocome/zulip,amallia/zulip,dawran6/zulip,willingc/zulip,samatdav/zulip,babbage/zulip,wdaher/zulip,mahim97/zulip,voidException/zulip,timabbott/zulip,timabbott/zulip,cosmicAsymmetry/zulip,dxq-git/zulip,atomic-labs/zulip,samatdav/zulip,susansls/zulip,jeffcao/zulip,kokoar/zulip,firstblade/zulip,aliceriot/zulip,brainwane/zulip,saitodisse/zulip,amallia/zulip,dnmfarrell/zulip,pradiptad/zulip,Qgap/zulip,bastianh/zulip,punchagan/zulip,punchagan/zulip,wangdeshui/zulip,wangdeshui/zulip,m1ssou/zulip,yocome/zulip,udxxabp/zulip,amanharitsh123/zulip,RobotCaleb/zulip,calvinleenyc/zulip,willingc/zulip,ipernet/zulip,dwrpayne/zulip,vakila/zulip,Gabriel0402/zulip,hafeez3000/zulip,glovebx/zulip,DazWorrall/zulip,sonali0901/zulip,JPJPJPOPOP/zulip,peiwei/zulip,niftynei/zulip,ryansnowboarder/zulip,Galexrt/zulip,jphilipsen05/zulip,LeeRisk/zulip,JPJPJPOPOP/zulip,bssrdf/zulip,bluesea/zulip,JPJPJPOPOP/zulip,vabs22/zulip,rishig/zulip,eastlhu/zulip,dattatreya303/zulip,mahim97/zulip,kokoar/zulip,krtkmj/zulip,samatdav/zulip,blaze225/zulip,calvinleenyc/zulip,dnmfarrell/zulip,hafeez3000/zulip,technicalpickles/zulip,jeffcao/zulip,proliming/zulip,zorojean/zulip,hj3938/zulip,ApsOps/zulip,jonesgithub/zulip,eastlhu/zulip,bitemyapp/zulip,hustlzp/zulip,deer-hope/zulip,KingxBanana/zulip,sonali0901/zulip,babbage/zulip,umkay/zulip,rishig/zulip,qq1012803704/zulip,tbutter/zulip,lfranchi/zulip,sonali0901/zulip,wavelets/zulip,xuanhan863/zulip,lfranchi/zulip,kou/zulip,joyhchen/zulip,lfranchi/zulip,adnanh/zulip,shrikrishnaholla/zulip,punchagan/zulip,andersk/zulip,TigorC/zulip,SmartPeople/zulip,johnny9/zulip,vaidap/zulip,schatt/zulip,dxq-git/zulip,mdavid/zulip,Sunin
us/zulip,gkotian/zulip,aakash-cr7/zulip,atomic-labs/zulip,showell/zulip,pradiptad/zulip,yocome/zulip,synicalsyntax/zulip,peiwei/zulip,DazWorrall/zulip,EasonYi/zulip,tbutter/zulip,Jianchun1/zulip,levixie/zulip,tdr130/zulip,rishig/zulip,ipernet/zulip,schatt/zulip,hengqujushi/zulip,fw1121/zulip,developerfm/zulip,hengqujushi/zulip,aliceriot/zulip,calvinleenyc/zulip,hengqujushi/zulip,hafeez3000/zulip,Gabriel0402/zulip,bitemyapp/zulip,wdaher/zulip,littledogboy/zulip,JPJPJPOPOP/zulip,peguin40/zulip,gigawhitlocks/zulip,aliceriot/zulip,brockwhittaker/zulip,rishig/zulip,PhilSk/zulip,johnny9/zulip,luyifan/zulip,Drooids/zulip,huangkebo/zulip,saitodisse/zulip,qq1012803704/zulip,TigorC/zulip,krtkmj/zulip,joshisa/zulip,LAndreas/zulip,ashwinirudrappa/zulip,glovebx/zulip,punchagan/zulip,reyha/zulip,zachallaun/zulip,eeshangarg/zulip,ahmadassaf/zulip,zorojean/zulip,dwrpayne/zulip,dhcrzf/zulip,armooo/zulip,tiansiyuan/zulip,j831/zulip,Galexrt/zulip,verma-varsha/zulip,sup95/zulip,samatdav/zulip,johnnygaddarr/zulip,yuvipanda/zulip,cosmicAsymmetry/zulip,pradiptad/zulip,dotcool/zulip,sharmaeklavya2/zulip,bastianh/zulip,themass/zulip,vabs22/zulip,so0k/zulip,tiansiyuan/zulip,nicholasbs/zulip,itnihao/zulip,blaze225/zulip,hengqujushi/zulip,Drooids/zulip,eeshangarg/zulip,zulip/zulip,lfranchi/zulip,krtkmj/zulip,atomic-labs/zulip,KJin99/zulip,amallia/zulip,nicholasbs/zulip,adnanh/zulip,jessedhillon/zulip,m1ssou/zulip,dattatreya303/zulip,akuseru/zulip,souravbadami/zulip,hackerkid/zulip,eeshangarg/zulip,firstblade/zulip,susansls/zulip,so0k/zulip,mansilladev/zulip,tommyip/zulip,LAndreas/zulip,ahmadassaf/zulip,akuseru/zulip,Diptanshu8/zulip,kou/zulip,easyfmxu/zulip,m1ssou/zulip,JanzTam/zulip,KingxBanana/zulip,Gabriel0402/zulip,christi3k/zulip,codeKonami/zulip,rishig/zulip,yuvipanda/zulip,noroot/zulip,stamhe/zulip,ikasumiwt/zulip,technicalpickles/zulip,Frouk/zulip,Frouk/zulip,dotcool/zulip,PhilSk/zulip,swinghu/zulip,jimmy54/zulip,ipernet/zulip,Cheppers/zulip,so0k/zulip,adnanh/zulip,ikasumiwt/zulip,Vall
her/zulip,jrowan/zulip,xuxiao/zulip,lfranchi/zulip,fw1121/zulip,Vallher/zulip,RobotCaleb/zulip,tiansiyuan/zulip,joshisa/zulip,armooo/zulip,firstblade/zulip,ahmadassaf/zulip,akuseru/zulip,zacps/zulip,j831/zulip,ericzhou2008/zulip,EasonYi/zulip,MayB/zulip,peiwei/zulip,hackerkid/zulip,natanovia/zulip,ikasumiwt/zulip,aliceriot/zulip,stamhe/zulip,LAndreas/zulip,vaidap/zulip,alliejones/zulip,jphilipsen05/zulip,Juanvulcano/zulip,jackrzhang/zulip,jessedhillon/zulip,dattatreya303/zulip,so0k/zulip,noroot/zulip,joshisa/zulip,aliceriot/zulip,willingc/zulip,zofuthan/zulip,Qgap/zulip,jackrzhang/zulip,zorojean/zulip,SmartPeople/zulip,he15his/zulip,paxapy/zulip,hayderimran7/zulip,andersk/zulip,tdr130/zulip,arpitpanwar/zulip,MariaFaBella85/zulip,AZtheAsian/zulip,bluesea/zulip,wangdeshui/zulip,synicalsyntax/zulip,bluesea/zulip,ashwinirudrappa/zulip,wangdeshui/zulip,SmartPeople/zulip,LAndreas/zulip,MayB/zulip,Suninus/zulip,deer-hope/zulip,armooo/zulip,he15his/zulip,jimmy54/zulip,andersk/zulip,themass/zulip,LeeRisk/zulip,levixie/zulip,Frouk/zulip,brockwhittaker/zulip,wweiradio/zulip,vakila/zulip,EasonYi/zulip,noroot/zulip,aliceriot/zulip,wdaher/zulip,voidException/zulip,Vallher/zulip,easyfmxu/zulip,gigawhitlocks/zulip,firstblade/zulip,souravbadami/zulip,sonali0901/zulip,peiwei/zulip,karamcnair/zulip,brainwane/zulip,showell/zulip,amallia/zulip,Galexrt/zulip,zulip/zulip,codeKonami/zulip,qq1012803704/zulip,jrowan/zulip,yocome/zulip,jerryge/zulip,PaulPetring/zulip,saitodisse/zulip,gkotian/zulip,developerfm/zulip,JanzTam/zulip,dxq-git/zulip,moria/zulip,eeshangarg/zulip,bastianh/zulip,codeKonami/zulip,ashwinirudrappa/zulip,luyifan/zulip,avastu/zulip,amanharitsh123/zulip,akuseru/zulip,codeKonami/zulip,alliejones/zulip,reyha/zulip,thomasboyt/zulip,LAndreas/zulip,zofuthan/zulip,jimmy54/zulip,udxxabp/zulip,joyhchen/zulip,jainayush975/zulip,wweiradio/zulip,ufosky-server/zulip,RobotCaleb/zulip,MayB/zulip,showell/zulip,itnihao/zulip,Frouk/zulip,dhcrzf/zulip,luyifan/zulip,udxxabp/zulip | 
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup

setup(name='humbug',
      version=humbug.__version__,
      description='Bindings for the Humbug message API',
      author='Humbug, Inc.',
      author_email='humbug@humbughq.com',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Environment :: Web Environment',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Topic :: Communications :: Chat',
      ],
      url='https://humbughq.com/dist/api/',
      packages=['humbug'],
      # Ship the example files plus one entry per directory found under
      # integrations/.  Discovering the integrations with glob() instead of
      # naming trac/nagios explicitly means newly added integrations are
      # packaged automatically without editing this file.
      data_files=[('share/humbug/examples',
                   ["examples/humbugrc", "examples/send-message"])] +
                 [(os.path.join('share/humbug/', relpath),
                   glob.glob(os.path.join(relpath, '*')))
                  for relpath in glob.glob('integrations/*')],
      scripts=["bin/humbug-send"],
      )
api: Fix need to manually update list of integrations.
(imported from commit 6842230f939483d32acb023ad38c53cb627df149) | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup

# Static example files first, then one data entry per directory discovered
# under integrations/, so new integrations are packaged automatically.
package_data = [('share/humbug/examples',
                 ["examples/humbugrc", "examples/send-message"])]
for relpath in glob.glob("integrations/*"):
    package_data.append((os.path.join('share/humbug/', relpath),
                         glob.glob(os.path.join(relpath, '*'))))

setup(name='humbug',
      version=humbug.__version__,
      description='Bindings for the Humbug message API',
      author='Humbug, Inc.',
      author_email='humbug@humbughq.com',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Environment :: Web Environment',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Topic :: Communications :: Chat',
      ],
      url='https://humbughq.com/dist/api/',
      packages=['humbug'],
      data_files=package_data,
      scripts=["bin/humbug-send"],
      )
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"]),
('share/humbug/integrations/trac', glob.glob('integrations/trac/*')),
('share/humbug/integrations/nagios', glob.glob('integrations/nagios/*')),
],
scripts=["bin/humbug-send"],
)
<commit_msg>api: Fix need to manually update list of integrations.
(imported from commit 6842230f939483d32acb023ad38c53cb627df149)<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"]),
('share/humbug/integrations/trac', glob.glob('integrations/trac/*')),
('share/humbug/integrations/nagios', glob.glob('integrations/nagios/*')),
],
scripts=["bin/humbug-send"],
)
api: Fix need to manually update list of integrations.
(imported from commit 6842230f939483d32acb023ad38c53cb627df149)#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"]),
('share/humbug/integrations/trac', glob.glob('integrations/trac/*')),
('share/humbug/integrations/nagios', glob.glob('integrations/nagios/*')),
],
scripts=["bin/humbug-send"],
)
<commit_msg>api: Fix need to manually update list of integrations.
(imported from commit 6842230f939483d32acb023ad38c53cb627df149)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import humbug
import glob
import os.path
from distutils.core import setup
setup(name='humbug',
version=humbug.__version__,
description='Bindings for the Humbug message API',
author='Humbug, Inc.',
author_email='humbug@humbughq.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Communications :: Chat',
],
url='https://humbughq.com/dist/api/',
packages=['humbug'],
data_files=[('share/humbug/examples', ["examples/humbugrc", "examples/send-message"])] + \
[(os.path.join('share/humbug/', relpath),
glob.glob(os.path.join(relpath, '*'))) for relpath in
glob.glob("integrations/*")
],
scripts=["bin/humbug-send"],
)
|
4e3351486b88a8cec60279ff3182565921caec0d | website_portal_v10/__openerp__.py | website_portal_v10/__openerp__.py | {
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'website': 'https://www.odoo.com/',
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
| {
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
 | Remove 'author' and 'website' on odoo modules' manifest | [IMP] Remove 'author' and 'website' on odoo modules' manifest
And use the default values :
- author : 'Odoo S.A.'
- website: https://www.odoo.com/
| Python | agpl-3.0 | Tecnativa/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,JayVora-SerpentCS/website,JayVora-SerpentCS/website,nicolas-petit/website,RoelAdriaans-B-informed/website,khaeusler/website,khaeusler/website,Tecnativa/website,RoelAdriaans-B-informed/website,nicolas-petit/website,khaeusler/website,nicolas-petit/website,Tecnativa/website,RoelAdriaans-B-informed/website | {
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'website': 'https://www.odoo.com/',
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
[IMP] Remove 'author' and 'website' on odoo modules' manifest
And use the default values :
- author : 'Odoo S.A.'
- website: https://www.odoo.com/ | {
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
| <commit_before>{
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'website': 'https://www.odoo.com/',
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
<commit_msg>[IMP] Remove 'author' and 'website' on odoo modules' manisfest
And use the default values :
- author : 'Odoo S.A.'
- website: https://www.odoo.com/<commit_after> | {
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
| {
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'website': 'https://www.odoo.com/',
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
[IMP] Remove 'author' and 'website' on odoo modules' manisfest
And use the default values :
- author : 'Odoo S.A.'
- website: https://www.odoo.com/{
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
| <commit_before>{
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'website': 'https://www.odoo.com/',
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
<commit_msg>[IMP] Remove 'author' and 'website' on odoo modules' manisfest
And use the default values :
- author : 'Odoo S.A.'
- website: https://www.odoo.com/<commit_after>{
'name': 'Website Portal',
'category': 'Website',
'summary': 'Account Management Frontend for your Customers',
'version': '1.0',
'description': """
Allows your customers to manage their account from a beautiful web interface.
""",
'depends': [
'website',
],
'data': [
'views/templates.xml',
],
'installable': True,
}
|
4532912e02761ef5b0209e866107987216a6e98d | compress/filters/yui/__init__.py | compress/filters/yui/__init__.py | import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
    # Minifies CSS/JS by piping content through the external YUI Compressor
    # binary configured in settings.COMPRESS_YUI_BINARY.
    # NOTE: Python 2 code (bare `print err` below).
    def filter_common(self, content, type_, arguments):
        """Run `content` through YUI Compressor as `type_` ('js' or 'css').

        `arguments` is appended verbatim to the command line; raises
        FilterError when the compressor exits non-zero.
        """
        command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
        if self.verbose:
            command += ' --verbose'
        # NOTE(review): shell=True with a settings-built command string is
        # fine for trusted settings, but an argv list would be safer.
        p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
            stdin=subprocess.PIPE, stderr=subprocess.PIPE)
        # NOTE(review): writing the whole payload to stdin before reading
        # stdout can deadlock if the child fills its output pipe first;
        # p.communicate(content) would avoid that — confirm with large inputs.
        p.stdin.write(content)
        p.stdin.close()
        filtered_css = p.stdout.read()
        p.stdout.close()
        err = p.stderr.read()
        p.stderr.close()
        if p.wait() != 0:
            # Surface stderr (or a generic message) as a FilterError.
            if not err:
                err = 'Unable to apply YUI Compressor filter'
            raise FilterError(err)
        if self.verbose:
            print err
        return filtered_css
    def filter_js(self, js):
        # NOTE(review): passes COMPRESS_YUI_CSS_ARGUMENTS for JS input while
        # filter_css below passes COMPRESS_YUI_JS_ARGUMENTS — these look
        # swapped; confirm the intended settings.
        return self.filter_common(js, 'js', settings.COMPRESS_YUI_CSS_ARGUMENTS)
    def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_JS_ARGUMENTS) | import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
    # Minifies CSS/JS by piping content through the external YUI Compressor
    # binary configured in settings.COMPRESS_YUI_BINARY.
    # NOTE: Python 2 code (bare `print err` below).
    def filter_common(self, content, type_, arguments):
        """Run `content` through YUI Compressor as `type_` ('js' or 'css').

        `arguments` is appended verbatim to the command line; raises
        FilterError when the compressor exits non-zero.
        """
        command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
        if self.verbose:
            command += ' --verbose'
        # NOTE(review): shell=True with a settings-built command string is
        # fine for trusted settings, but an argv list would be safer.
        p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
            stdin=subprocess.PIPE, stderr=subprocess.PIPE)
        # NOTE(review): writing the whole payload to stdin before reading
        # stdout can deadlock if the child fills its output pipe first;
        # p.communicate(content) would avoid that — confirm with large inputs.
        p.stdin.write(content)
        p.stdin.close()
        # Despite the name, this holds the JS output too when type_ == 'js'.
        filtered_css = p.stdout.read()
        p.stdout.close()
        err = p.stderr.read()
        p.stderr.close()
        if p.wait() != 0:
            # Surface stderr (or a generic message) as a FilterError.
            if not err:
                err = 'Unable to apply YUI Compressor filter'
            raise FilterError(err)
        if self.verbose:
            print err
        return filtered_css
    def filter_js(self, js):
        # JS content with the JS-specific compressor arguments.
        return self.filter_common(js, 'js', settings.COMPRESS_YUI_JS_ARGUMENTS)
    def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_CSS_ARGUMENTS) | Fix YUI arg passing, had CSS/JS flipped | Fix YUI arg passing, had CSS/JS flipped
| Python | mit | cyberdelia/django-pipeline,sjhewitt/django-pipeline,perdona/django-pipeline,sideffect0/django-pipeline,adamcharnock/django-pipeline,jwatson/django-pipeline,kronion/django-pipeline,lydell/django-pipeline,apendleton/django-pipeline,edx/django-pipeline,yuvadm/django-pipeline,novapost/django-pipeline,floppym/django-pipeline,botify-labs/django-pipeline,camilonova/django-pipeline,fabiosantoscode/django-pipeline,beedesk/django-pipeline,ei-grad/django-pipeline,tayfun/django-pipeline,skirsdeda/django-pipeline,novapost/django-pipeline,vbabiy/django-pipeline,botify-labs/django-pipeline,TwigWorld/django-pipeline,pombredanne/django-pipeline-1,cyberdelia/django-pipeline,vbabiy/django-pipeline,jazzband/django-pipeline,zapier/django-pipeline,leonardoo/django-pipeline,hyperoslo/django-pipeline,leonardoo/django-pipeline,jensenbox/django-pipeline,zapier/django-pipeline,pdr/django-pipeline,floppym/django-pipeline,wienczny/django-pipeline,edwinlunando/django-pipeline,apendleton/django-pipeline,kronion/django-pipeline,vstoykov/django-pipeline,joshkehn/django-pipeline,theatlantic/django-pipeline,jensenbox/django-pipeline,hyperoslo/django-pipeline,lexqt/django-pipeline,Kobold/django-pipeline,perdona/django-pipeline,demux/django-pipeline,tayfun/django-pipeline,airtonix/django-pipeline,almost/django-pipeline,edwinlunando/django-pipeline,Tekco/django-pipeline,novapost/django-pipeline,sideffect0/django-pipeline,Tekco/django-pipeline,joshkehn/django-pipeline,almost/django-pipeline,cyberdelia/django-pipeline,necaris/django-pipeline,airtonix/django-pipeline,letolab/django-pipeline,chipx86/django-pipeline,leonardoo/django-pipeline,pombredanne/django-pipeline-1,caioariede/django-pipeline,zapier/django-pipeline,ei-grad/django-pipeline,lydell/django-pipeline,edx/django-pipeline,mgorny/django-pipeline,apendleton/django-pipeline,wienczny/django-pipeline,skolsuper/django-pipeline,wienczny/django-pipeline,d9pouces/django-pipeline,Kobold/django-pipeline,adamcharnock/django-pipeline,jazzba
nd/django-pipeline,fahhem/django-pipeline,necaris/django-pipeline,tayfun/django-pipeline,lexqt/django-pipeline,simudream/django-pipeline,beedesk/django-pipeline,simudream/django-pipeline,lexqt/django-pipeline,theatlantic/django-pipeline,caioariede/django-pipeline,jensenbox/django-pipeline,hyperoslo/django-pipeline,camilonova/django-pipeline,caioariede/django-pipeline,pdr/django-pipeline,chipx86/django-pipeline,demux/django-pipeline,d9pouces/django-pipeline,skirsdeda/django-pipeline,Nivl/django-pipeline,teozkr/django-pipeline,jwatson/django-pipeline,Kobold/django-pipeline,jazzband/django-pipeline,vstoykov/django-pipeline,edwinlunando/django-pipeline,kronion/django-pipeline,theatlantic/django-pipeline,adamcharnock/django-pipeline,mweibel/django-pipeline,mweibel/django-pipeline,Kami/django-pipeline,skolsuper/django-pipeline,Kami/django-pipeline,TwigWorld/django-pipeline,mgorny/django-pipeline,fabiosantoscode/django-pipeline,fahhem/django-pipeline,d9pouces/django-pipeline,skolsuper/django-pipeline,lydell/django-pipeline,TwigWorld/django-pipeline,beedesk/django-pipeline,mgorny/django-pipeline,sjhewitt/django-pipeline,botify-labs/django-pipeline,edx/django-pipeline,simudream/django-pipeline,letolab/django-pipeline,vbabiy/django-pipeline,sjhewitt/django-pipeline,sideffect0/django-pipeline,floppym/django-pipeline,yuvadm/django-pipeline,Kami/django-pipeline,perdona/django-pipeline,chipx86/django-pipeline,teozkr/django-pipeline,yuvadm/django-pipeline,almost/django-pipeline,joshkehn/django-pipeline,camilonova/django-pipeline,Nivl/django-pipeline,jwatson/django-pipeline,demux/django-pipeline,Tekco/django-pipeline,skirsdeda/django-pipeline,ei-grad/django-pipeline | import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_CSS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_JS_ARGUMENTS)Fix YUI arg passing, had CSS/JS flipped | import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_JS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_CSS_ARGUMENTS) | <commit_before>import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_CSS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_JS_ARGUMENTS)<commit_msg>Fix YUI arg passing, had CSS/JS flipped<commit_after> | import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_JS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_CSS_ARGUMENTS) | import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_CSS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_JS_ARGUMENTS)Fix YUI arg passing, had CSS/JS flippedimport subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_JS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_CSS_ARGUMENTS) | <commit_before>import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_CSS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_JS_ARGUMENTS)<commit_msg>Fix YUI arg passing, had CSS/JS flipped<commit_after>import subprocess
from compress.conf import settings
from compress.filter_base import FilterBase, FilterError
class YUICompressorFilter(FilterBase):
def filter_common(self, content, type_, arguments):
command = '%s --type=%s %s' % (settings.COMPRESS_YUI_BINARY, type_, arguments)
if self.verbose:
command += ' --verbose'
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, \
stdin=subprocess.PIPE, stderr=subprocess.PIPE)
p.stdin.write(content)
p.stdin.close()
filtered_css = p.stdout.read()
p.stdout.close()
err = p.stderr.read()
p.stderr.close()
if p.wait() != 0:
if not err:
err = 'Unable to apply YUI Compressor filter'
raise FilterError(err)
if self.verbose:
print err
return filtered_css
def filter_js(self, js):
return self.filter_common(js, 'js', settings.COMPRESS_YUI_JS_ARGUMENTS)
def filter_css(self, css):
return self.filter_common(css, 'css', settings.COMPRESS_YUI_CSS_ARGUMENTS) |
b14e605c83f95e6e1a3c70f148c32bbdc0ca12b1 | zeus/api/resources/build_index.py | zeus/api/resources/build_index.py | from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
    def get(self):
        """
        Return a list of builds.

        Only builds from repositories the current tenant can access are
        returned, newest first, capped at 100 rows.
        """
        # Tenants scope queries automatically, but that would still admit
        # public repositories — require explicit repository access instead.
        current_tenant = auth.get_current_tenant()
        if not current_tenant.repository_ids:
            return self.respond([])

        # Eager-load everything the serializer touches to avoid N+1 queries.
        eager_loads = (
            joinedload('repository'),
            joinedload('source'),
            joinedload('source').joinedload('author'),
            joinedload('source').joinedload('revision'),
            joinedload('source').joinedload('patch'),
            subqueryload_all('stats'),
        )
        build_query = Build.query.options(*eager_loads)
        build_query = build_query.filter(
            Build.repository_id.in_(current_tenant.repository_ids),
        )
        build_query = build_query.order_by(Build.date_created.desc()).limit(100)
        return self.respond_with_schema(builds_schema, build_query)
| from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
    def get(self):
        """
        Return a paginated list of builds, newest first.

        Only builds from repositories the current tenant can access are
        included.
        """
        # Tenants scope queries automatically, but that would still admit
        # public repositories — require explicit repository access instead.
        tenant = auth.get_current_tenant()
        if not tenant.repository_ids:
            return self.respond([])

        # Eager-load everything the serializer touches to avoid N+1 queries.
        newest_first = Build.query.options(
            joinedload('repository'),
            joinedload('source'),
            joinedload('source').joinedload('author'),
            joinedload('source').joinedload('revision'),
            joinedload('source').joinedload('patch'),
            subqueryload_all('stats'),
        ).filter(
            Build.repository_id.in_(tenant.repository_ids),
        ).order_by(
            Build.date_created.desc(),
        )
        return self.paginate_with_schema(builds_schema, newest_first)
| Add pagination to build index | feat: Add pagination to build index
| Python | apache-2.0 | getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus | from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc()).limit(100)
return self.respond_with_schema(builds_schema, query)
feat: Add pagination to build index | from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
| <commit_before>from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc()).limit(100)
return self.respond_with_schema(builds_schema, query)
<commit_msg>feat: Add pagination to build index<commit_after> | from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
| from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc()).limit(100)
return self.respond_with_schema(builds_schema, query)
feat: Add pagination to build indexfrom sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
| <commit_before>from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc()).limit(100)
return self.respond_with_schema(builds_schema, query)
<commit_msg>feat: Add pagination to build index<commit_after>from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
|
04a7de877c50bc84428e7bb7d30b1c6cac00a59f | ipywidgets/widgets/tests/test_widget_selection.py | ipywidgets/widgets/tests/test_widget_selection.py | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("module")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "deprecated" in str(w[-1].message)
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) > 0
assert issubclass(w[-1].category, DeprecationWarning)
assert "Support for mapping types has been deprecated" in str(w[-1].message)
| Use simplefilter('always') for testing the warning | Use simplefilter('always') for testing the warning
* Use `warnings.simplefilter('always')` for DeprecationWarning
* More specific test on warning message
| Python | bsd-3-clause | jupyter-widgets/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("module")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "deprecated" in str(w[-1].message)
Use simplefilter('always') for testing the warning
* Use `warnings.simplefilter('always')` for DeprecationWarning
* More specific test on warning message | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) > 0
assert issubclass(w[-1].category, DeprecationWarning)
assert "Support for mapping types has been deprecated" in str(w[-1].message)
| <commit_before># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("module")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "deprecated" in str(w[-1].message)
<commit_msg>Use simplefilter('always') for testing the warning
* Use `warnings.simplefilter('always')` for DeprecationWarning
* More specific test on warning message<commit_after> | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) > 0
assert issubclass(w[-1].category, DeprecationWarning)
assert "Support for mapping types has been deprecated" in str(w[-1].message)
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("module")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "deprecated" in str(w[-1].message)
Use simplefilter('always') for testing the warning
* Use `warnings.simplefilter('always')` for DeprecationWarning
* More specific test on warning message# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) > 0
assert issubclass(w[-1].category, DeprecationWarning)
assert "Support for mapping types has been deprecated" in str(w[-1].message)
| <commit_before># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("module")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) == 1
assert issubclass(w[-1].category, DeprecationWarning)
assert "deprecated" in str(w[-1].message)
<commit_msg>Use simplefilter('always') for testing the warning
* Use `warnings.simplefilter('always')` for DeprecationWarning
* More specific test on warning message<commit_after># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import warnings
from unittest import TestCase
from ipywidgets import Dropdown
class TestDropdown(TestCase):
def test_construction(self):
Dropdown()
def test_deprecation_warning_mapping_options(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
Dropdown(options={'One': 1, 'Two': 2, 'Three': 3})
assert len(w) > 0
assert issubclass(w[-1].category, DeprecationWarning)
assert "Support for mapping types has been deprecated" in str(w[-1].message)
|
81978240e48dbaac2567054b33617a1acabbb695 | corehq/apps/app_manager/tasks.py | corehq/apps/app_manager/tasks.py | from celery.task import task
from corehq.apps.users.models import CommCareUser
@task
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
| from celery.task import task
from corehq.apps.users.models import CommCareUser
@task(queue='background_queue')
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
| Use background queue for creating user cases | Use background queue for creating user cases
| Python | bsd-3-clause | qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | from celery.task import task
from corehq.apps.users.models import CommCareUser
@task
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
Use background queue for creating user cases | from celery.task import task
from corehq.apps.users.models import CommCareUser
@task(queue='background_queue')
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
| <commit_before>from celery.task import task
from corehq.apps.users.models import CommCareUser
@task
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
<commit_msg>Use background queue for creating user cases<commit_after> | from celery.task import task
from corehq.apps.users.models import CommCareUser
@task(queue='background_queue')
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
| from celery.task import task
from corehq.apps.users.models import CommCareUser
@task
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
Use background queue for creating user casesfrom celery.task import task
from corehq.apps.users.models import CommCareUser
@task(queue='background_queue')
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
| <commit_before>from celery.task import task
from corehq.apps.users.models import CommCareUser
@task
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
<commit_msg>Use background queue for creating user cases<commit_after>from celery.task import task
from corehq.apps.users.models import CommCareUser
@task(queue='background_queue')
def create_user_cases(domain_name):
from corehq.apps.callcenter.utils import sync_usercase
for user in CommCareUser.by_domain(domain_name):
sync_usercase(user)
|
c5001c6f6dab2639fdeb5735f4d4f6f7b8d35395 | pamqp/body.py | pamqp/body.py | # -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
| # -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
| Update to include typing, cleanup docstrings and code | Update to include typing, cleanup docstrings and code
| Python | bsd-3-clause | gmr/pamqp | # -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
Update to include typing, cleanup docstrings and code | # -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
| <commit_before># -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
<commit_msg>Update to include typing, cleanup docstrings and code<commit_after> | # -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
| # -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
Update to include typing, cleanup docstrings and code# -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
| <commit_before># -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
class ContentBody(object):
"""ContentBody carries the value for an AMQP message body frame"""
name = 'ContentBody'
def __init__(self, value=None):
"""Create a new instance of a ContentBody object, passing in the value
of the message body
:param str|unicode|bytes value: The content body
"""
self.value = value
def __len__(self):
"""Return the length of the content body value
:rtype: int
"""
return len(self.value)
def marshal(self):
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
return self.value
def unmarshal(self, data):
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
:rtype: str|unicode|bytes
"""
self.value = data
<commit_msg>Update to include typing, cleanup docstrings and code<commit_after># -*- encoding: utf-8 -*-
"""
The pamqp.body module contains the Body class which is used when
unmarshaling body frames. When dealing with content frames, the message body
will be returned from the library as an instance of the body class.
"""
import typing
class ContentBody:
"""ContentBody carries the value for an AMQP message body frame"""
def __init__(self, value: typing.Optional[bytes] = None):
"""Create a new instance of a ContentBody object"""
self.value = value
def __len__(self) -> int:
"""Return the length of the content body value"""
return len(self.value) if self.value else 0
def marshal(self) -> bytes:
"""Return the marshaled content body. This method is here for API
compatibility, there is no special marshaling for the payload in a
content frame.
"""
return self.value
def unmarshal(self, data: bytes) -> typing.NoReturn:
"""Apply the data to the object. This method is here for API
compatibility, there is no special unmarhsaling for the payload in a
content frame.
"""
self.value = data
|
f2f3ed4d735bd12956b5e4915118fc40de11d33a | src/files_create_datetree.py | src/files_create_datetree.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
KmdFiles.removeEmptyFolders(self.args.source[0], self.args.doit)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
| Clean up dir after move | Clean up dir after move
| Python | mit | pzia/keepmydatas | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
Clean up dir after move | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
KmdFiles.removeEmptyFolders(self.args.source[0], self.args.doit)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
<commit_msg>Clean up dir after move<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
KmdFiles.removeEmptyFolders(self.args.source[0], self.args.doit)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
Clean up dir after move#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
KmdFiles.removeEmptyFolders(self.args.source[0], self.args.doit)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
<commit_msg>Clean up dir after move<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse source tree, get old files and move files info a new folder tree"""
import KmdCmd
import KmdFiles
import os
import re
import logging
class KmdFilesMove(KmdCmd.KmdCommand):
regexp = None
def extendParser(self):
super(KmdFilesMove, self).extendParser()
#Extend parser
self.parser.add_argument('source', metavar='</path/to/tree>', nargs=1, help='The source tree')
self.parser.add_argument('tree', metavar='</path/to/dest>', nargs=1, help='Folder to put matching files')
self.parser.add_argument('age', metavar='</path/to/dest>', nargs=1, help='age')
def run(self):
logging.info("Parsing %s", self.args.source[0])
for root, dirs, files in os.walk(self.args.source[0]):
logging.debug("Walking in %s", root)
for name in files:
pname = os.path.join(root, name)
dname = os.path.join(self.args.tree[0])
try :
KmdFiles.fileMoveRenameToDirIfOld(pname, dname, int(self.args.age[0]), self.args.doit)
except :
logging.error("Bad move from %s to %s", pname, dname)
KmdFiles.removeEmptyFolders(self.args.source[0], self.args.doit)
if __name__ == "__main__":
cmd = KmdFilesMove(__doc__)
cmd.run()
|
f9698eb96ca0c69a9d41a2d19a56af83e74da949 | examples/advanced/extend_python.py | examples/advanced/extend_python.py | """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from lark.indenter import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| Fix confusing import (no change in functionality) | Fix confusing import (no change in functionality) | Python | mit | lark-parser/lark | """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
Fix confusing import (no change in functionality) | """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from lark.indenter import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| <commit_before>"""
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
<commit_msg>Fix confusing import (no change in functionality)<commit_after> | """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from lark.indenter import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
Fix confusing import (no change in functionality)"""
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from lark.indenter import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| <commit_before>"""
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
<commit_msg>Fix confusing import (no change in functionality)<commit_after>"""
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from lark.indenter import PythonIndenter
GRAMMAR = r"""
%import python (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
|
f54c792b5bd79dedca275199d1e0d922f73620e9 | python/protein-translation/protein_translation.py | python/protein-translation/protein_translation.py | # Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCC, UCC | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
| # Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
| Fix mapping for codon keys for Serine | Fix mapping for codon keys for Serine
| Python | mit | rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism | # Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCC, UCC | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
Fix mapping for codon keys for Serine | # Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
| <commit_before># Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCC, UCC | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
<commit_msg>Fix mapping for codon keys for Serine<commit_after> | # Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
| # Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCC, UCC | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
Fix mapping for codon keys for Serine# Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
| <commit_before># Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCC, UCC | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UCC": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
<commit_msg>Fix mapping for codon keys for Serine<commit_after># Codon | Protein
# :--- | :---
# AUG | Methionine
# UUU, UUC | Phenylalanine
# UUA, UUG | Leucine
# UCU, UCC, UCA, UCG | Serine
# UAU, UAC | Tyrosine
# UGU, UGC | Cysteine
# UGG | Tryptophan
# UAA, UAG, UGA | STOP
CODON_TO_PROTEIN = {
"AUG": "Methionine",
"UUU": "Phenylalanine",
"UUC": "Phenylalanine",
"UUA": "Leucine",
"UUG": "Leucine",
"UCU": "Serine",
"UCC": "Serine",
"UCA": "Serine",
"UCG": "Serine",
"UGU": "Cysteine",
"UGC": "Cysteine",
"UGG": "Tryptophan",
"UAA": "STOP",
"UAG": "STOP",
"UGA": "STOP"
}
def proteins(strand):
return [CODON_TO_PROTEIN[strand]]
|
bed65ca5a3af883a90cdc869dbfdaf08ac4ba40e | company/configurations_api.py | company/configurations_api.py | from ..cw_controller import CWController
# Class for /company/configurations
from connectpyse.company import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
| from ..cw_controller import CWController
# Class for /company/configurations
from . import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
| Fix config api class again | Fix config api class again
| Python | mit | joshuamsmith/ConnectPyse | from ..cw_controller import CWController
# Class for /company/configurations
from connectpyse.company import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
Fix config api class again | from ..cw_controller import CWController
# Class for /company/configurations
from . import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
| <commit_before>from ..cw_controller import CWController
# Class for /company/configurations
from connectpyse.company import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
<commit_msg>Fix config api class again<commit_after> | from ..cw_controller import CWController
# Class for /company/configurations
from . import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
| from ..cw_controller import CWController
# Class for /company/configurations
from connectpyse.company import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
Fix config api class againfrom ..cw_controller import CWController
# Class for /company/configurations
from . import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
| <commit_before>from ..cw_controller import CWController
# Class for /company/configurations
from connectpyse.company import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
<commit_msg>Fix config api class again<commit_after>from ..cw_controller import CWController
# Class for /company/configurations
from . import configuration
class ConfigurationsAPI(CWController):
def __init__(self):
self.module_url = 'company'
self.module = 'configurations'
self._class = configuration.Configuration
super().__init__() # instance gets passed to parent object
def get_configurations(self):
return super()._get()
def create_configuration(self, a_configuration):
return super()._create(a_configuration)
def get_configurations_count(self):
return super()._get_count()
def get_configuration_by_id(self, configuration_id):
return super()._get_by_id(configuration_id)
def delete_configuration_by_id(self, configuration_id):
super()._delete_by_id(configuration_id)
def replace_configuration(self, configuration_id):
pass
def update_configuration(self, configuration_id, key, value):
return super()._update(configuration_id, key, value)
|
391c1681eaeabfdbe65a64a1bb8b05beca30141e | wqflask/utility/db_tools.py | wqflask/utility/db_tools.py | from MySQLdb import escape_string as escape
def create_in_clause(items):
    """Return a parenthesised MySQL ``IN`` clause of the escaped, quoted *items*."""
    quoted = ("'{}'".format(piece) for piece in mescape(*items))
    return '( {} )'.format(', '.join(quoted))
def mescape(*items):
    """Stringify and MySQL-escape every argument, returning the results as a list."""
    return [escape(str(entry)) for entry in items]
| from MySQLdb import escape_string as escape_
def create_in_clause(items):
    """Return a parenthesised MySQL ``IN`` clause of the escaped, quoted *items*."""
    quoted = ("'{}'".format(piece) for piece in mescape(*items))
    return '( {} )'.format(', '.join(quoted))
def mescape(*items):
    """Stringify and escape every argument, decoding each result to a unicode string."""
    out = []
    for entry in items:
        out.append(escape_(str(entry)).decode('utf8'))
    return out
def escape(string_):
    """Escape *string_* for MySQL and return the result as a unicode string."""
    escaped_bytes = escape_(string_)
    return escaped_bytes.decode('utf8')
| Add global method to convert binary string to plain string | Add global method to convert binary string to plain string
* wqflask/utility/db_tools.py: escape_string returns a binary string which
introduces a bug when composing sql query string. The escaped strings have to be
converted to plain text.
| Python | agpl-3.0 | pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2 | from MySQLdb import escape_string as escape
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
escaped = [escape(str(item)) for item in items]
#print("escaped is:", escaped)
return escaped
Add global method to convert binary string to plain string
* wqflask/utility/db_tools.py: escape_string returns a binary string which
introduces a bug when composing sql query string. The escaped strings have to be
converted to plain text. | from MySQLdb import escape_string as escape_
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
return [escape_(str(item)).decode('utf8') for item in items]
def escape(string_):
return escape_(string_).decode('utf8')
| <commit_before>from MySQLdb import escape_string as escape
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
escaped = [escape(str(item)) for item in items]
#print("escaped is:", escaped)
return escaped
<commit_msg>Add global method to convert binary string to plain string
* wqflask/utility/db_tools.py: escape_string returns a binary string which
introduces a bug when composing sql query string. The escaped strings have to be
converted to plain text.<commit_after> | from MySQLdb import escape_string as escape_
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
return [escape_(str(item)).decode('utf8') for item in items]
def escape(string_):
return escape_(string_).decode('utf8')
| from MySQLdb import escape_string as escape
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
escaped = [escape(str(item)) for item in items]
#print("escaped is:", escaped)
return escaped
Add global method to convert binary string to plain string
* wqflask/utility/db_tools.py: escape_string returns a binary string which
introduces a bug when composing sql query string. The escaped strings have to be
converted to plain text.from MySQLdb import escape_string as escape_
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
return [escape_(str(item)).decode('utf8') for item in items]
def escape(string_):
return escape_(string_).decode('utf8')
| <commit_before>from MySQLdb import escape_string as escape
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
escaped = [escape(str(item)) for item in items]
#print("escaped is:", escaped)
return escaped
<commit_msg>Add global method to convert binary string to plain string
* wqflask/utility/db_tools.py: escape_string returns a binary string which
introduces a bug when composing sql query string. The escaped strings have to be
converted to plain text.<commit_after>from MySQLdb import escape_string as escape_
def create_in_clause(items):
"""Create an in clause for mysql"""
in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
in_clause = '( {} )'.format(in_clause)
return in_clause
def mescape(*items):
"""Multiple escape"""
return [escape_(str(item)).decode('utf8') for item in items]
def escape(string_):
return escape_(string_).decode('utf8')
|
d4a7a69654e9e055c309762340e7aa4a722ca1f1 | mass_mailing_switzerland/models/crm_event_compassion.py | mass_mailing_switzerland/models/crm_event_compassion.py | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
# Extends crm.event.compassion so that the marketing campaign set on an event
# is propagated to its analytic account and origin record.
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
# Right after creation, copy the campaign onto the related records so
# they stay linked to the same campaign as the event itself.
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
# Re-sync the campaign on every written event, not only when
# 'campaign_id' is in vals.
# NOTE(review): analytic_id / origin_id may be empty recordsets here;
# presumably assigning on an empty recordset is a no-op — confirm.
for new_event in self:
if new_event.campaign_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
new_event.origin_id.campaign_id = new_event.campaign_id
return res
| ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
# Extends crm.event.compassion so that the marketing campaign set on an event
# is propagated to its analytic account and origin record.
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
# Right after creation, copy the campaign onto the related records so
# they stay linked to the same campaign as the event itself.
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
# Re-sync the campaign on every written event; the inner guards skip
# events that have no analytic account or origin yet (the fix for the
# crash on events created without those related records).
for new_event in self:
if new_event.campaign_id:
if new_event.analytic_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
if new_event.origin_id:
new_event.origin_id.campaign_id = new_event.campaign_id
return res
| FIX bug in event creation | FIX bug in event creation
| Python | agpl-3.0 | eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
new_event.origin_id.campaign_id = new_event.campaign_id
return res
FIX bug in event creation | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
if new_event.analytic_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
if new_event.origin_id:
new_event.origin_id.campaign_id = new_event.campaign_id
return res
| <commit_before>##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
new_event.origin_id.campaign_id = new_event.campaign_id
return res
<commit_msg>FIX bug in event creation<commit_after> | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
if new_event.analytic_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
if new_event.origin_id:
new_event.origin_id.campaign_id = new_event.campaign_id
return res
| ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
new_event.origin_id.campaign_id = new_event.campaign_id
return res
FIX bug in event creation##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
if new_event.analytic_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
if new_event.origin_id:
new_event.origin_id.campaign_id = new_event.campaign_id
return res
| <commit_before>##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
new_event.origin_id.campaign_id = new_event.campaign_id
return res
<commit_msg>FIX bug in event creation<commit_after>##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
if new_event.analytic_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
if new_event.origin_id:
new_event.origin_id.campaign_id = new_event.campaign_id
return res
|
00f83dad3a0cec2bccb4de878b477bbcf850e52d | core/datatypes/url.py | core/datatypes/url.py | import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
    def clean(self):
        """Validate and canonize the URL before saving.

        Raises ValidationError when the value is not recognised as a URL
        or cannot be normalised by urlnorm.
        """
        try:
            # Evaluate is_url once instead of twice (condition + message).
            valid = is_url(self.value)
            if not valid:
                raise ValidationError("Invalid URL (is_url={}): {}".format(valid, self.value))
            # Default to http:// when no scheme is present, then canonize.
            if re.match("[a-zA-Z]+://", self.value) is None:
                self.value = "http://{}".format(self.value)
            self.value = urlnorm.norm(self.value)
        except urlnorm.InvalidUrl:
            raise ValidationError("Invalid URL: {}".format(self.value))
| import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
    def clean(self):
        """Normalise the stored URL before the document is saved."""
        try:
            has_scheme = re.match("[a-zA-Z]+://", self.value) is not None
            if not has_scheme:
                # No scheme given: assume plain http before canonizing.
                self.value = "http://{}".format(self.value)
            self.value = urlnorm.norm(self.value)
        except urlnorm.InvalidUrl:
            raise ValidationError("Invalid URL: {}".format(self.value))
| Raise exception on invalid URL | Raise exception on invalid URL
| Python | apache-2.0 | yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti | import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if not is_url(self.value):
raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
Raise exception on invalid URL | import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
| <commit_before>import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if not is_url(self.value):
raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
<commit_msg>Raise exception on invalid URL<commit_after> | import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
| import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if not is_url(self.value):
raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
Raise exception on invalid URLimport re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
| <commit_before>import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if not is_url(self.value):
raise ValidationError("Invalid URL (is_url={}): {}".format(is_url(self.value), self.value))
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
<commit_msg>Raise exception on invalid URL<commit_after>import re
from mongoengine import *
import urlnorm
from core.datatypes import Element
from core.helpers import is_url
class Url(Element):
def clean(self):
"""Ensures that URLs are canonized before saving"""
try:
if re.match("[a-zA-Z]+://", self.value) is None:
self.value = "http://{}".format(self.value)
self.value = urlnorm.norm(self.value)
except urlnorm.InvalidUrl:
raise ValidationError("Invalid URL: {}".format(self.value))
|
9ad5f279c33339ab00b1fcf90975c085afe0ab43 | mysite/extra_translations.py | mysite/extra_translations.py | # This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# These _() calls are never executed for their return value: they exist solely
# so that `makemessages` extracts the strings into the translation catalogue.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
| # -*- coding: utf-8 -*-
# This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
_('Title / pre-nominal honorific (e.g. Dr, Sir, etc.)')
_('Full name')
_('Post-nominal letters (e.g. CBE, DSO, etc.)')
_('Email')
_('Gender (e.g. “male”, “female”)')
_('Date of birth (a four digit year or a full date)')
_('User facing description of the information')
_('Name of the Popolo related type')
_('Type of HTML field the user will see')
_('Value to put in the info_type_key e.g. twitter')
_('Name of the field in the array that stores the value, e.g url for links, value for contact_type, identifier for identifiers')
_('Twitter username (e.g. democlub)')
_('Twitter username (e.g. democlub)')
_('Facebook page (e.g. for their campaign)')
_('Homepage URL')
_('Wikipedia URL')
_('LinkedIn URL')
_("The party's candidate page for this person")
| Add some more text used in migrations which need translation | Add some more text used in migrations which need translation
| Python | agpl-3.0 | mysociety/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative | # This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
Add some more text used in migrations which need translation | # -*- coding: utf-8 -*-
# This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
_('Title / pre-nominal honorific (e.g. Dr, Sir, etc.)')
_('Full name')
_('Post-nominal letters (e.g. CBE, DSO, etc.)')
_('Email')
_('Gender (e.g. “male”, “female”)')
_('Date of birth (a four digit year or a full date)')
_('User facing description of the information')
_('Name of the Popolo related type')
_('Type of HTML field the user will see')
_('Value to put in the info_type_key e.g. twitter')
_('Name of the field in the array that stores the value, e.g url for links, value for contact_type, identifier for identifiers')
_('Twitter username (e.g. democlub)')
_('Twitter username (e.g. democlub)')
_('Facebook page (e.g. for their campaign)')
_('Homepage URL')
_('Wikipedia URL')
_('LinkedIn URL')
_("The party's candidate page for this person")
| <commit_before># This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
<commit_msg>Add some more text used in migrations which need translation<commit_after> | # -*- coding: utf-8 -*-
# This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
_('Title / pre-nominal honorific (e.g. Dr, Sir, etc.)')
_('Full name')
_('Post-nominal letters (e.g. CBE, DSO, etc.)')
_('Email')
_('Gender (e.g. “male”, “female”)')
_('Date of birth (a four digit year or a full date)')
_('User facing description of the information')
_('Name of the Popolo related type')
_('Type of HTML field the user will see')
_('Value to put in the info_type_key e.g. twitter')
_('Name of the field in the array that stores the value, e.g url for links, value for contact_type, identifier for identifiers')
_('Twitter username (e.g. democlub)')
_('Twitter username (e.g. democlub)')
_('Facebook page (e.g. for their campaign)')
_('Homepage URL')
_('Wikipedia URL')
_('LinkedIn URL')
_("The party's candidate page for this person")
| # This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
Add some more text used in migrations which need translation# -*- coding: utf-8 -*-
# This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
_('Title / pre-nominal honorific (e.g. Dr, Sir, etc.)')
_('Full name')
_('Post-nominal letters (e.g. CBE, DSO, etc.)')
_('Email')
_('Gender (e.g. “male”, “female”)')
_('Date of birth (a four digit year or a full date)')
_('User facing description of the information')
_('Name of the Popolo related type')
_('Type of HTML field the user will see')
_('Value to put in the info_type_key e.g. twitter')
_('Name of the field in the array that stores the value, e.g url for links, value for contact_type, identifier for identifiers')
_('Twitter username (e.g. democlub)')
_('Twitter username (e.g. democlub)')
_('Facebook page (e.g. for their campaign)')
_('Homepage URL')
_('Wikipedia URL')
_('LinkedIn URL')
_("The party's candidate page for this person")
| <commit_before># This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
<commit_msg>Add some more text used in migrations which need translation<commit_after># -*- coding: utf-8 -*-
# This module exists just to list strings for translation to be picked
# up by makemessages.
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
# Labels for the extra fields which are defined in the database.
# Costa Rica:
_('Profession')
_('Important Roles')
_('Standing for re-election')
# Labels for the person fields which are setup in the database and it pulls
# the label text from the database
_('Name')
_('Family Name')
_('Given Name')
_('Additional Name')
_('Honorific Prefix')
_('Honorific Suffix')
_('Patronymic Name')
_('Sort Name')
_('Email')
_('Gender')
_('Birth Date')
_('Death Date')
_('Summary')
_('Biography')
_('National Identity')
_('Title / pre-nominal honorific (e.g. Dr, Sir, etc.)')
_('Full name')
_('Post-nominal letters (e.g. CBE, DSO, etc.)')
_('Email')
_('Gender (e.g. “male”, “female”)')
_('Date of birth (a four digit year or a full date)')
_('User facing description of the information')
_('Name of the Popolo related type')
_('Type of HTML field the user will see')
_('Value to put in the info_type_key e.g. twitter')
_('Name of the field in the array that stores the value, e.g url for links, value for contact_type, identifier for identifiers')
_('Twitter username (e.g. democlub)')
_('Twitter username (e.g. democlub)')
_('Facebook page (e.g. for their campaign)')
_('Homepage URL')
_('Wikipedia URL')
_('LinkedIn URL')
_("The party's candidate page for this person")
|
6ce72c5b0726fc2e3ae78c6f0a22e4f03f26a2ca | erpnext/patches/v5_4/update_purchase_cost_against_project.py | erpnext/patches/v5_4/update_purchase_cost_against_project.py | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project"):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save() | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project", filters={"docstatus": 0}):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save() | Update project cost for draft project only | [fix] Update project cost for draft project only
| Python | agpl-3.0 | mbauskar/helpdesk-erpnext,hernad/erpnext,gangadharkadam/saloon_erp_install,mbauskar/omnitech-demo-erpnext,indictranstech/trufil-erpnext,mbauskar/helpdesk-erpnext,susuchina/ERPNEXT,njmube/erpnext,aruizramon/alec_erpnext,ShashaQin/erpnext,anandpdoshi/erpnext,pombredanne/erpnext,aruizramon/alec_erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp,shft117/SteckerApp,fuhongliang/erpnext,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/saloon_erp,MartinEnder/erpnext-de,gangadharkadam/contributionerp,mbauskar/omnitech-demo-erpnext,gsnbng/erpnext,SPKian/Testing2,hanselke/erpnext-1,indictranstech/biggift-erpnext,mbauskar/omnitech-demo-erpnext,mbauskar/helpdesk-erpnext,mbauskar/alec_frappe5_erpnext,mahabuber/erpnext,Aptitudetech/ERPNext,aruizramon/alec_erpnext,sagar30051991/ozsmart-erp,hernad/erpnext,SPKian/Testing2,mahabuber/erpnext,indictranstech/erpnext,mbauskar/omnitech-erpnext,njmube/erpnext,susuchina/ERPNEXT,gangadharkadam/contributionerp,mbauskar/helpdesk-erpnext,gangadharkadam/v6_erp,ShashaQin/erpnext,anandpdoshi/erpnext,gangadharkadam/saloon_erp_install,fuhongliang/erpnext,Tejal011089/huntercamp_erpnext,gangadharkadam/v6_erp,Tejal011089/huntercamp_erpnext,mbauskar/sapphire-erpnext,pombredanne/erpnext,indictranstech/biggift-erpnext,indictranstech/osmosis-erpnext,MartinEnder/erpnext-de,gmarke/erpnext,shft117/SteckerApp,hernad/erpnext,indictranstech/reciphergroup-erpnext,fuhongliang/erpnext,gsnbng/erpnext,gangadhar-kadam/helpdesk-erpnext,anandpdoshi/erpnext,sheafferusa/erpnext,mbauskar/alec_frappe5_erpnext,mahabuber/erpnext,indictranstech/biggift-erpnext,indictranstech/erpnext,gsnbng/erpnext,SPKian/Testing,gmarke/erpnext,indictranstech/osmosis-erpnext,sagar30051991/ozsmart-erp,susuchina/ERPNEXT,hatwar/buyback-erpnext,njmube/erpnext,SPKian/Testing,SPKian/Testing2,geekroot/erpnext,geekroot/erpnext,indictranstech/trufil-erpnext,SPKian/Testing2,sheafferusa/erpnext,ShashaQin/erpnext,anandpdoshi/erpnext,SPKian/Testing,ShashaQin/erpnext,gangadhar-kadam/help
desk-erpnext,meisterkleister/erpnext,indictranstech/erpnext,hanselke/erpnext-1,indictranstech/erpnext,indictranstech/biggift-erpnext,hatwar/buyback-erpnext,gangadharkadam/contributionerp,mbauskar/omnitech-erpnext,hatwar/buyback-erpnext,sagar30051991/ozsmart-erp,njmube/erpnext,indictranstech/reciphergroup-erpnext,hernad/erpnext,gangadharkadam/saloon_erp,pombredanne/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/v6_erp,sheafferusa/erpnext,mbauskar/omnitech-erpnext,mbauskar/sapphire-erpnext,meisterkleister/erpnext,indictranstech/reciphergroup-erpnext,indictranstech/osmosis-erpnext,meisterkleister/erpnext,Tejal011089/huntercamp_erpnext,gangadharkadam/contributionerp,mbauskar/sapphire-erpnext,MartinEnder/erpnext-de,gmarke/erpnext,gsnbng/erpnext,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,indictranstech/osmosis-erpnext,hanselke/erpnext-1,mbauskar/alec_frappe5_erpnext,hatwar/buyback-erpnext,geekroot/erpnext,sheafferusa/erpnext,gmarke/erpnext,hanselke/erpnext-1,gangadhar-kadam/helpdesk-erpnext,mbauskar/omnitech-demo-erpnext,shft117/SteckerApp,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp_install,indictranstech/reciphergroup-erpnext,gangadharkadam/saloon_erp_install,pombredanne/erpnext,aruizramon/alec_erpnext,SPKian/Testing,sagar30051991/ozsmart-erp,fuhongliang/erpnext,indictranstech/trufil-erpnext,shft117/SteckerApp,MartinEnder/erpnext-de,gangadharkadam/saloon_erp,geekroot/erpnext,meisterkleister/erpnext,Tejal011089/huntercamp_erpnext,gangadharkadam/v6_erp,indictranstech/trufil-erpnext | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project"):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save()[fix] Update project cost for draft project only | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project", filters={"docstatus": 0}):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save() | <commit_before># Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project"):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save()<commit_msg>[fix] Update project cost for draft project only<commit_after> | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project", filters={"docstatus": 0}):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save() | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project"):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save()[fix] Update project cost for draft project only# Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project", filters={"docstatus": 0}):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save() | <commit_before># Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project"):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save()<commit_msg>[fix] Update project cost for draft project only<commit_after># Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
for p in frappe.get_all("Project", filters={"docstatus": 0}):
project = frappe.get_doc("Project", p.name)
project.update_purchase_costing()
project.save() |
7232f3cfe495814f5c9923cd715d4dff40458c5a | takeyourmeds/api/views.py | takeyourmeds/api/views.py | from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return Reminder.objects.filter(user=self.request.user)
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
| from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return self.request.user.reminders.all()
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
| Use related_name to avoid "missing" security | Use related_name to avoid "missing" security
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
| Python | mit | takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web | from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return Reminder.objects.filter(user=self.request.user)
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
Use related_name to avoid "missing" security
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk> | from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return self.request.user.reminders.all()
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
| <commit_before>from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return Reminder.objects.filter(user=self.request.user)
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
<commit_msg>Use related_name to avoid "missing" security
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after> | from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return self.request.user.reminders.all()
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
| from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return Reminder.objects.filter(user=self.request.user)
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
Use related_name to avoid "missing" security
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return self.request.user.reminders.all()
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
| <commit_before>from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return Reminder.objects.filter(user=self.request.user)
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
<commit_msg>Use related_name to avoid "missing" security
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>from rest_framework import serializers, viewsets
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.permissions import IsAuthenticated
from takeyourmeds.reminder.models import Reminder
class ReminderTimeField(serializers.RelatedField):
def to_representation(self, model):
return model.cronstring
class ReminderSerializer(serializers.ModelSerializer):
times = ReminderTimeField(many=True, read_only=True)
def create(self, data):
req = self.context['request']
data['user_id'] = req.user.pk
obj = super(ReminderSerializer, self).create(data)
for x in req.data.get('times', []):
obj.times.create(cronstring=x)
return obj
class Meta:
model = Reminder
fields = (
'times',
'message',
'audiourl',
'telnumber',
)
class ReminderViewSet(viewsets.ModelViewSet):
queryset = Reminder.objects.all()
serializer_class = ReminderSerializer
permission_classes = [IsAuthenticated]
def get_queryset(self):
return self.request.user.reminders.all()
@api_view(('POST',))
def trigger_now(request):
# FIXME: Move parameter to urlconf
pk = request.data.get('id')
reminder = Reminder.objects.get(pk=pk)
reminder.dispatch_task()
return Response({'message': "Triggered"})
|
64336620b0b2c279293e921ba0a7cdd15a573d85 | intelmq/bots/parsers/cznic/parser_proki.py | intelmq/bots/parsers/cznic/parser_proki.py | # -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if "data" not in report or not len(report.get("data")):
return
for line in report.get("data"):
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
| # -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if isinstance(report, dict) and "data" in report:
# extract event list from recieved JSON
report = report.get("data")
for line in report:
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
| Allow loading events from dump | Allow loading events from dump
| Python | agpl-3.0 | aaronkaplan/intelmq,certtools/intelmq,certtools/intelmq,aaronkaplan/intelmq,certtools/intelmq,aaronkaplan/intelmq | # -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if "data" not in report or not len(report.get("data")):
return
for line in report.get("data"):
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
Allow loading events from dump | # -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if isinstance(report, dict) and "data" in report:
# extract event list from recieved JSON
report = report.get("data")
for line in report:
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
| <commit_before># -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if "data" not in report or not len(report.get("data")):
return
for line in report.get("data"):
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
<commit_msg>Allow loading events from dump<commit_after> | # -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if isinstance(report, dict) and "data" in report:
# extract event list from recieved JSON
report = report.get("data")
for line in report:
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
| # -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if "data" not in report or not len(report.get("data")):
return
for line in report.get("data"):
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
Allow loading events from dump# -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if isinstance(report, dict) and "data" in report:
# extract event list from recieved JSON
report = report.get("data")
for line in report:
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
| <commit_before># -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if "data" not in report or not len(report.get("data")):
return
for line in report.get("data"):
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
<commit_msg>Allow loading events from dump<commit_after># -*- coding: utf-8 -*-
import json
from intelmq.lib import utils
from intelmq.lib.bot import ParserBot
class CZNICProkiParserBot(ParserBot):
recover_line = ParserBot.recover_line_json
def parse(self, report):
raw_report = utils.base64_decode(report.get("raw"))
report = json.loads(raw_report)
if isinstance(report, dict) and "data" in report:
# extract event list from recieved JSON
report = report.get("data")
for line in report:
yield line
def parse_line(self, line, report):
event = self.new_event(report)
# json keys map 1:1 to harmonization fields
for field in line:
if field == "feed.name":
event.add("extra.original_feed_name", line.get(field))
elif field == "time.observation":
event.add("extra.original_time_observation", line.get(field))
else:
event.add(field, line.get(field))
event.add("raw", self.recover_line(line))
yield event
BOT = CZNICProkiParserBot
|
0d8888ef1bfa056b9fd440b227a3e3d84b10d541 | src/suit_dashboard/layout.py | src/suit_dashboard/layout.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Column) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Row) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
| Fix type check of column elements | Fix type check of column elements
| Python | isc | Pawamoy/django-suit-dashboard,Pawamoy/django-suit-dashboard,Pawamoy/django-suit-dashboard,Pawamoy/django-suit-dashboard | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Column) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
Fix type check of column elements | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Row) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Column) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
<commit_msg>Fix type check of column elements<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Row) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Column) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
Fix type check of column elements# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Row) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Column) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
<commit_msg>Fix type check of column elements<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from suit_dashboard.box import Box
class Grid(object):
def __init__(self, *rows, **kwargs):
if not all([isinstance(r, Row) for r in rows]):
raise TypeError('All elements of Grid must be Row instances')
self.type = 'grid'
self.rows = rows
class Row(object):
def __init__(self, *columns, **kwargs):
if not all([isinstance(c, Column) for c in columns]):
raise TypeError('All elements of Row must be Column instances')
self.type = 'row'
self.columns = columns
class Column(object):
def __init__(self, *elements, **kwargs):
if not all([isinstance(e, Row) or issubclass(type(e), Box)
for e in elements]):
raise TypeError('All elements of Column must '
'be Row or Box instances')
width = kwargs.pop('width', 12)
if width not in range(1, 13):
raise ValueError('Column width must be between 1 and 12')
self.type = 'column'
self.elements = elements
self.width = width
|
c6d4a0e34a0e1ef1ea330734477aac434322ff01 | extensions/ExtGameController.py | extensions/ExtGameController.py | from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
| from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
| Update extensions and GameController subclass | Update extensions and GameController subclass
| Python | apache-2.0 | dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server | from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
Update extensions and GameController subclass | from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
| <commit_before>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
<commit_msg>Update extensions and GameController subclass<commit_after> | from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
| from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
Update extensions and GameController subclassfrom python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
| <commit_before>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
<commit_msg>Update extensions and GameController subclass<commit_after>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
087fd390c5c19d0187102cc2dbe1ac9ac8c4fb03 | perfrunner/workloads/n1ql.py | perfrunner/workloads/n1ql.py | INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
),
'range': (
'CREATE INDEX by_coins ON {}(coins.f)',
'CREATE INDEX by_achievement ON {}(achievements)',
'CREATE INDEX by_category ON {}(category)',
'CREATE INDEX by_year ON {}(year)',
),
'multi_emits': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'body': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'group_by': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_gmtime ON {}(gmtime)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
}
| INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
),
'range': (
'CREATE INDEX by_coins ON `{}` (coins.f)',
'CREATE INDEX by_achievement ON `{}` (achievements)',
'CREATE INDEX by_category ON `{}` (category)',
'CREATE INDEX by_year ON `{}` (year)',
),
'multi_emits': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'body': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'group_by': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_gmtime ON `{}` (gmtime)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
}
| Correct syntax while creating indexes | Correct syntax while creating indexes
Change-Id: I90625647d8723531dbc7498d5d25e84ef1a3ed2b
Reviewed-on: http://review.couchbase.org/50007
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>
| Python | apache-2.0 | dkao-cb/perfrunner,couchbase/perfrunner,EricACooper/perfrunner,pavel-paulau/perfrunner,mikewied/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,EricACooper/perfrunner,pavel-paulau/perfrunner,dkao-cb/perfrunner,vmx/perfrunner,thomas-couchbase/perfrunner,hsharsha/perfrunner,vmx/perfrunner,couchbase/perfrunner,thomas-couchbase/perfrunner,pavel-paulau/perfrunner,PaintScratcher/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,EricACooper/perfrunner,PaintScratcher/perfrunner,pavel-paulau/perfrunner,mikewied/perfrunner,EricACooper/perfrunner,hsharsha/perfrunner | INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
),
'range': (
'CREATE INDEX by_coins ON {}(coins.f)',
'CREATE INDEX by_achievement ON {}(achievements)',
'CREATE INDEX by_category ON {}(category)',
'CREATE INDEX by_year ON {}(year)',
),
'multi_emits': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'body': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'group_by': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_gmtime ON {}(gmtime)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
}
Correct syntax while creating indexes
Change-Id: I90625647d8723531dbc7498d5d25e84ef1a3ed2b
Reviewed-on: http://review.couchbase.org/50007
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com> | INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
),
'range': (
'CREATE INDEX by_coins ON `{}` (coins.f)',
'CREATE INDEX by_achievement ON `{}` (achievements)',
'CREATE INDEX by_category ON `{}` (category)',
'CREATE INDEX by_year ON `{}` (year)',
),
'multi_emits': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'body': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'group_by': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_gmtime ON `{}` (gmtime)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
}
| <commit_before>INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
),
'range': (
'CREATE INDEX by_coins ON {}(coins.f)',
'CREATE INDEX by_achievement ON {}(achievements)',
'CREATE INDEX by_category ON {}(category)',
'CREATE INDEX by_year ON {}(year)',
),
'multi_emits': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'body': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'group_by': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_gmtime ON {}(gmtime)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
}
<commit_msg>Correct syntax while creating indexes
Change-Id: I90625647d8723531dbc7498d5d25e84ef1a3ed2b
Reviewed-on: http://review.couchbase.org/50007
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com><commit_after> | INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
),
'range': (
'CREATE INDEX by_coins ON `{}` (coins.f)',
'CREATE INDEX by_achievement ON `{}` (achievements)',
'CREATE INDEX by_category ON `{}` (category)',
'CREATE INDEX by_year ON `{}` (year)',
),
'multi_emits': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'body': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'group_by': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_gmtime ON `{}` (gmtime)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
}
| INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
),
'range': (
'CREATE INDEX by_coins ON {}(coins.f)',
'CREATE INDEX by_achievement ON {}(achievements)',
'CREATE INDEX by_category ON {}(category)',
'CREATE INDEX by_year ON {}(year)',
),
'multi_emits': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'body': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'group_by': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_gmtime ON {}(gmtime)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
}
Correct syntax while creating indexes
Change-Id: I90625647d8723531dbc7498d5d25e84ef1a3ed2b
Reviewed-on: http://review.couchbase.org/50007
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com>INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
),
'range': (
'CREATE INDEX by_coins ON `{}` (coins.f)',
'CREATE INDEX by_achievement ON `{}` (achievements)',
'CREATE INDEX by_category ON `{}` (category)',
'CREATE INDEX by_year ON `{}` (year)',
),
'multi_emits': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'body': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'group_by': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_gmtime ON `{}` (gmtime)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
}
| <commit_before>INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
),
'range': (
'CREATE INDEX by_coins ON {}(coins.f)',
'CREATE INDEX by_achievement ON {}(achievements)',
'CREATE INDEX by_category ON {}(category)',
'CREATE INDEX by_year ON {}(year)',
),
'multi_emits': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_county ON {}(county.f.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'body': (
'CREATE INDEX by_city ON {}(city.f.f)',
'CREATE INDEX by_realm ON {}(realm.f)',
'CREATE INDEX by_country ON {}(country.f)',
),
'group_by': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_gmtime ON {}(gmtime)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON {}(state.f)',
'CREATE INDEX by_year ON {}(year)',
'CREATE INDEX by_full_state ON {}(full_state.f)',
),
}
<commit_msg>Correct syntax while creating indexes
Change-Id: I90625647d8723531dbc7498d5d25e84ef1a3ed2b
Reviewed-on: http://review.couchbase.org/50007
Tested-by: buildbot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Michael Wiederhold <a17fed27eaa842282862ff7c1b9c8395a26ac320@couchbase.com><commit_after>INDEX_STATEMENTS = {
'basic': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
),
'range': (
'CREATE INDEX by_coins ON `{}` (coins.f)',
'CREATE INDEX by_achievement ON `{}` (achievements)',
'CREATE INDEX by_category ON `{}` (category)',
'CREATE INDEX by_year ON `{}` (year)',
),
'multi_emits': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_county ON `{}` (county.f.f)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'body': (
'CREATE INDEX by_city ON `{}` (city.f.f)',
'CREATE INDEX by_realm ON `{}` (`realm.f`)',
'CREATE INDEX by_country ON `{}` (country.f)',
),
'group_by': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_gmtime ON `{}` (gmtime)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
'distinct': (
'CREATE INDEX by_state ON `{}` (state.f)',
'CREATE INDEX by_year ON `{}` (year)',
'CREATE INDEX by_full_state ON `{}` (full_state.f)',
),
}
|
7d6c9ac443dd34784f00fd4d7bc0cbee904ed98f | src/python/cargo/temporal.py | src/python/cargo/temporal.py | """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance, zoned pytz.utc.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
| """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
| Fix comment after dropping the pytz dependency. | Fix comment after dropping the pytz dependency.
| Python | mit | borg-project/cargo,borg-project/cargo | """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance, zoned pytz.utc.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
Fix comment after dropping the pytz dependency. | """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
| <commit_before>"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance, zoned pytz.utc.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
<commit_msg>Fix comment after dropping the pytz dependency.<commit_after> | """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
| """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance, zoned pytz.utc.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
Fix comment after dropping the pytz dependency."""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
| <commit_before>"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance, zoned pytz.utc.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
<commit_msg>Fix comment after dropping the pytz dependency.<commit_after>"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from datetime import tzinfo
class UTC(tzinfo):
"""
The one true time zone.
"""
def utcoffset(self, dt):
"""
Return the offset to UTC.
"""
from datetime import timedelta
return timedelta(0)
def tzname(self, dt):
"""
Return the time zone name.
"""
return "UTC"
def dst(self, dt):
"""
Return the DST offset.
"""
from datetime import timedelta
return timedelta(0)
def utc_now():
"""
Return a non-naive UTC datetime instance.
"""
from datetime import datetime as DateTime
return DateTime.now(UTC())
def seconds(value):
"""
Return the equivalent number of seconds, floating-point.
"""
return value.days * 8.64e4 + value.seconds + value.microseconds / 1e6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.