commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f9d0d6af3d4b2d4b4ca88ba5aa0565f29528bf96 | snakeplan/projects/urls.py | snakeplan/projects/urls.py | from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
| from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
(r'^(.*)/iterations', 'projects.project_iterations'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
| Add /project/<id>/iterations route for the hell of it(I think it makes more sense) | Add /project/<id>/iterations route for the hell of it(I think it makes more sense)
| Python | apache-2.0 | mcrute/snakeplan,mcrute/snakeplan,mcrute/snakeplan | from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
Add /project/<id>/iterations route for the hell of it(I think it makes more sense) | from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
(r'^(.*)/iterations', 'projects.project_iterations'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
| <commit_before>from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
<commit_msg>Add /project/<id>/iterations route for the hell of it(I think it makes more sense)<commit_after> | from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
(r'^(.*)/iterations', 'projects.project_iterations'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
| from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
Add /project/<id>/iterations route for the hell of it(I think it makes more sense)from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
(r'^(.*)/iterations', 'projects.project_iterations'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
| <commit_before>from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
<commit_msg>Add /project/<id>/iterations route for the hell of it(I think it makes more sense)<commit_after>from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('snakeplan.projects.views',
(r'^$', 'projects.index'),
(r'^story/(.*)/', 'stories.index'),
(r'^iteration/(.*)/', 'iterations.index'),
(r'^create/', 'projects.create_project'),
(r'^(.*)/iterations', 'projects.project_iterations'),
url(r'^(.*)/', 'projects.project_iterations', name='project_iterations'),
)
|
43bd5b33a0342213e2c8e1782744624fadead61e | partner_compassion/model/mail_followers.py | partner_compassion/model/mail_followers.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| Fix typo in follower restrictions | Fix typo in follower restrictions
| Python | agpl-3.0 | CompassionCH/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
Fix typo in follower restrictions | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
<commit_msg>Fix typo in follower restrictions<commit_after> | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
Fix typo in follower restrictions# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
<commit_msg>Fix typo in follower restrictions<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
|
35966403b243f7ac7effa0bc69e2938b9324278b | tools/lxlcrawler/list_sao_terms.py | tools/lxlcrawler/list_sao_terms.py | #!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&limit=200&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
| #!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&_limit=1024&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
| Change limit in lxlcrawler sao example script | Change limit in lxlcrawler sao example script
| Python | cc0-1.0 | Kungbib/datalab,Kungbib/datalab | #!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&limit=200&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
Change limit in lxlcrawler sao example script | #!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&_limit=1024&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
| <commit_before>#!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&limit=200&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
<commit_msg>Change limit in lxlcrawler sao example script<commit_after> | #!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&_limit=1024&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
| #!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&limit=200&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
Change limit in lxlcrawler sao example script#!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&_limit=1024&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
| <commit_before>#!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&limit=200&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
<commit_msg>Change limit in lxlcrawler sao example script<commit_after>#!/usr/bin/env python3
import lxlcrawler
start_url = "https://id.kb.se/find?q=*&_limit=1024&inScheme.@id=https://id.kb.se/term/sao"
for i, item in enumerate(lxlcrawler.crawl(start_url)):
print("[{}] {}, {}".format(i + 1, item['@id'], item.get('prefLabel')))
|
e37ae5f799e02cc2308793af585316557e59e6cf | froide/redaction/utils.py | froide/redaction/utils.py | import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
open(filename, 'w').write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
| import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
with open(filename, 'wb') as f:
f.write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
| Write to png file in binary mode | Write to png file in binary mode | Python | mit | stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide | import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
open(filename, 'w').write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
Write to png file in binary mode | import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
with open(filename, 'wb') as f:
f.write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
| <commit_before>import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
open(filename, 'w').write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
<commit_msg>Write to png file in binary mode<commit_after> | import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
with open(filename, 'wb') as f:
f.write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
| import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
open(filename, 'w').write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
Write to png file in binary modeimport os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
with open(filename, 'wb') as f:
f.write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
| <commit_before>import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
open(filename, 'w').write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
<commit_msg>Write to png file in binary mode<commit_after>import os
import base64
import tempfile
import subprocess
def convert_to_pdf(post):
path = tempfile.mkdtemp()
pagenr = 1
while True:
data = post.get('page_%s' % pagenr)
if data is None:
break
if not data.startswith('data:image/png;base64,'):
continue
prefix, data = data.split(',', 1)
filename = os.path.join(path, 'page_%03d.png' % pagenr)
with open(filename, 'wb') as f:
f.write(base64.b64decode(data))
pagenr += 1
filename = os.path.join(path, 'page_*')
output_file = os.path.join(path, 'final.pdf')
if subprocess.call(["convert", filename, output_file]) == 0:
return output_file
return None
|
2a5b68a9357ac576a684880549a93f32335e5761 | pyramid_zipkin/__init__.py | pyramid_zipkin/__init__.py | from pyramid.tweens import MAIN
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=MAIN)
| from pyramid.tweens import EXCVIEW
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=EXCVIEW)
| Change to over=EXCVIEW to make response status work correctly | Change to over=EXCVIEW to make response status work correctly
| Python | apache-2.0 | Yelp/pyramid_zipkin,bplotnick/pyramid_zipkin | from pyramid.tweens import MAIN
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=MAIN)
Change to over=EXCVIEW to make response status work correctly | from pyramid.tweens import EXCVIEW
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=EXCVIEW)
| <commit_before>from pyramid.tweens import MAIN
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=MAIN)
<commit_msg>Change to over=EXCVIEW to make response status work correctly<commit_after> | from pyramid.tweens import EXCVIEW
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=EXCVIEW)
| from pyramid.tweens import MAIN
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=MAIN)
Change to over=EXCVIEW to make response status work correctlyfrom pyramid.tweens import EXCVIEW
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=EXCVIEW)
| <commit_before>from pyramid.tweens import MAIN
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=MAIN)
<commit_msg>Change to over=EXCVIEW to make response status work correctly<commit_after>from pyramid.tweens import EXCVIEW
def includeme(config): # pragma: no cover
"""
:type config: :class:`pyramid.config.Configurator`
"""
config.add_tween('pyramid_zipkin.zipkin.zipkin_tween', over=EXCVIEW)
|
3cfbe13f53837a0cb5065b37f4c0a6ae5c9dd50d | cutthroat/views/signin.py | cutthroat/views/signin.py | from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
else:
self.redirect("/views/room/join")
| from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
elif player["current_room"]:
raise NotImplementedError
# self.redirect("<Insert URL to room page here>")
else:
self.redirect("/views/room/join")
| Add room redirect clause to Landing | Add room redirect clause to Landing
| Python | agpl-3.0 | hfaran/LivesPool,hfaran/LivesPool,hfaran/LivesPool,hfaran/LivesPool | from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
else:
self.redirect("/views/room/join")
Add room redirect clause to Landing | from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
elif player["current_room"]:
raise NotImplementedError
# self.redirect("<Insert URL to room page here>")
else:
self.redirect("/views/room/join")
| <commit_before>from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
else:
self.redirect("/views/room/join")
<commit_msg>Add room redirect clause to Landing<commit_after> | from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
elif player["current_room"]:
raise NotImplementedError
# self.redirect("<Insert URL to room page here>")
else:
self.redirect("/views/room/join")
| from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
else:
self.redirect("/views/room/join")
Add room redirect clause to Landingfrom tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
elif player["current_room"]:
raise NotImplementedError
# self.redirect("<Insert URL to room page here>")
else:
self.redirect("/views/room/join")
| <commit_before>from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
else:
self.redirect("/views/room/join")
<commit_msg>Add room redirect clause to Landing<commit_after>from tornado import template
from tornado.web import authenticated
from cutthroat.handlers import ViewHandler
class SignIn(ViewHandler):
"""SignIn"""
def get(self):
self.render("signin.html")
class Landing(ViewHandler):
"""Landing"""
@authenticated
def get(self):
_, player = self.db_conn._get_player(self.get_current_user())
if player["current_game_id"]:
raise NotImplementedError
# self.redirect("<Insert URL to game view here>")
elif player["current_room"]:
raise NotImplementedError
# self.redirect("<Insert URL to room page here>")
else:
self.redirect("/views/room/join")
|
5f0a4f33196c368318dba21aaa66956d4b973d60 | usig_normalizador_amba/settings.py | usig_normalizador_amba/settings.py | # coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://usig.buenosaires.gov.ar/servicios/Callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
| # coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
| Fix a la url del callejero CABA | Fix a la url del callejero CABA
| Python | mit | usig/normalizador-amba,hogasa/normalizador-amba | # coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://usig.buenosaires.gov.ar/servicios/Callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
Fix a la url del callejero CABA | # coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
| <commit_before># coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://usig.buenosaires.gov.ar/servicios/Callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
<commit_msg>Fix a la url del callejero CABA<commit_after> | # coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
| # coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://usig.buenosaires.gov.ar/servicios/Callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
Fix a la url del callejero CABA# coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
| <commit_before># coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://usig.buenosaires.gov.ar/servicios/Callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
<commit_msg>Fix a la url del callejero CABA<commit_after># coding: UTF-8
from __future__ import absolute_import
default_settings = {
'callejero_amba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero-amba/',
'callejero_caba_server': 'http://servicios.usig.buenosaires.gob.ar/callejero',
}
# Tipo de normalizacion
CALLE = 0
CALLE_ALTURA = 1
CALLE_Y_CALLE = 2
INVALIDO = -1
# Tipo de Match
NO_MATCH = 0
MATCH = 1
MATCH_INCLUIDO = 2
MATCH_PERMUTADO = 3
MATCH_EXACTO = 4
|
8830b0e726a671fa2b0bbafd1487148ae23fc1d4 | admin/forms.py | admin/forms.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get('connection').data
try:
CONNECTION = tuple([(c['slug'], c['name']) for c in bconnection])
except:
CONNECTION = tuple()
name = TextField(validators=[Required()])
conection = SelectField(choices=CONNECTION, validators=[Required()])
sql = TextAreaField(validators=[Required()])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
def ObjGenerate(bucket, key, value=None, _type=tuple):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get(bucket).data
try:
if _type is tuple:
return _type([(c[key], c[value]) for c in bconnection])
return _type(c[key] for c in bconnection)
except:
return _type()
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
name = TextField(validators=[Required()])
conection = SelectField(choices=ObjGenerate('connection', 'slug', 'name'),
validators=[Required()])
sql = TextAreaField(validators=[Required()])
| Create new method object generate get riak bucket and generate tuple or list | Create new method object generate
get riak bucket and generate tuple or list
| Python | mit | chrisdamba/mining,seagoat/mining,avelino/mining,jgabriellima/mining,AndrzejR/mining,jgabriellima/mining,mining/mining,mlgruby/mining,chrisdamba/mining,mlgruby/mining,mlgruby/mining,seagoat/mining,AndrzejR/mining,avelino/mining,mining/mining | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get('connection').data
try:
CONNECTION = tuple([(c['slug'], c['name']) for c in bconnection])
except:
CONNECTION = tuple()
name = TextField(validators=[Required()])
conection = SelectField(choices=CONNECTION, validators=[Required()])
sql = TextAreaField(validators=[Required()])
Create new method object generate
get riak bucket and generate tuple or list | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
def ObjGenerate(bucket, key, value=None, _type=tuple):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get(bucket).data
try:
if _type is tuple:
return _type([(c[key], c[value]) for c in bconnection])
return _type(c[key] for c in bconnection)
except:
return _type()
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
name = TextField(validators=[Required()])
conection = SelectField(choices=ObjGenerate('connection', 'slug', 'name'),
validators=[Required()])
sql = TextAreaField(validators=[Required()])
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get('connection').data
try:
CONNECTION = tuple([(c['slug'], c['name']) for c in bconnection])
except:
CONNECTION = tuple()
name = TextField(validators=[Required()])
conection = SelectField(choices=CONNECTION, validators=[Required()])
sql = TextAreaField(validators=[Required()])
<commit_msg>Create new method object generate
get riak bucket and generate tuple or list<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
def ObjGenerate(bucket, key, value=None, _type=tuple):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get(bucket).data
try:
if _type is tuple:
return _type([(c[key], c[value]) for c in bconnection])
return _type(c[key] for c in bconnection)
except:
return _type()
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
name = TextField(validators=[Required()])
conection = SelectField(choices=ObjGenerate('connection', 'slug', 'name'),
validators=[Required()])
sql = TextAreaField(validators=[Required()])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get('connection').data
try:
CONNECTION = tuple([(c['slug'], c['name']) for c in bconnection])
except:
CONNECTION = tuple()
name = TextField(validators=[Required()])
conection = SelectField(choices=CONNECTION, validators=[Required()])
sql = TextAreaField(validators=[Required()])
Create new method object generate
get riak bucket and generate tuple or list#!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
def ObjGenerate(bucket, key, value=None, _type=tuple):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get(bucket).data
try:
if _type is tuple:
return _type([(c[key], c[value]) for c in bconnection])
return _type(c[key] for c in bconnection)
except:
return _type()
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
name = TextField(validators=[Required()])
conection = SelectField(choices=ObjGenerate('connection', 'slug', 'name'),
validators=[Required()])
sql = TextAreaField(validators=[Required()])
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get('connection').data
try:
CONNECTION = tuple([(c['slug'], c['name']) for c in bconnection])
except:
CONNECTION = tuple()
name = TextField(validators=[Required()])
conection = SelectField(choices=CONNECTION, validators=[Required()])
sql = TextAreaField(validators=[Required()])
<commit_msg>Create new method object generate
get riak bucket and generate tuple or list<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import riak
from wtforms.fields import TextField, TextAreaField, SelectField
from wtforms.validators import Required
from wtforms_tornado import Form
def ObjGenerate(bucket, key, value=None, _type=tuple):
myClient = riak.RiakClient(protocol='http',
http_port=8098,
host='127.0.0.1')
myBucket = myClient.bucket('openmining-admin')
bconnection = myBucket.get(bucket).data
try:
if _type is tuple:
return _type([(c[key], c[value]) for c in bconnection])
return _type(c[key] for c in bconnection)
except:
return _type()
class ConnectionForm(Form):
name = TextField(validators=[Required()])
conection = TextField(validators=[Required()])
class CubeForm(Form):
name = TextField(validators=[Required()])
conection = SelectField(choices=ObjGenerate('connection', 'slug', 'name'),
validators=[Required()])
sql = TextAreaField(validators=[Required()])
|
46e9db6167a9c4f7f778381da888537c00d35bfd | emailsupport/admin.py | emailsupport/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
| Add prev. and next email to context only if exist original (current) | Add prev. and next email to context only if exist original (current)
| Python | mit | rosti-cz/django-emailsupport | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
Add prev. and next email to context only if exist original (current) | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
<commit_msg>Add prev. and next email to context only if exist original (current)<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
Add prev. and next email to context only if exist original (current)# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
<commit_msg>Add prev. and next email to context only if exist original (current)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Email, Resolution
class ResolutionInline(admin.StackedInline):
model = Resolution
max_num = 1
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'submitter', 'get_state_display')
inlines = [ResolutionInline]
ordering = ('-state', '-created')
change_form_template = 'admin/email_change_form.html'
readonly_fields = ('submitter', 'subject', 'body', 'body_html')
fieldsets = (
('Question', {
'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
}),
)
class Media:
css = {
"all": ("admin/css/admin.css",)
}
def render_change_form(self, *args, **kwargs):
response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
email = response.context_data['original']
if email:
response.context_data['previous_email'] = self.get_previous_email(email)
response.context_data['next_email'] = self.get_next_email(email)
return response
def get_previous_email(self, email):
return Email.objects.get_previous_email(email)
def get_next_email(self, email):
return Email.objects.get_next_email(email)
admin.site.register(Email, EmailAdmin)
|
9359a236bc955a84b53417246fb5b4b2e3d04389 | i18n/tests/loader_tests.py | i18n/tests/loader_tests.py | import unittest
import i18n
class TestFileLoader(unittest.TestCase):
def test_dummy(self):
self.assertTrue(hasattr(i18n, 'resource_loader'))
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
| import unittest
from i18n import resource_loader
class TestFileLoader(unittest.TestCase):
def test_nonexisting_extension(self):
self.assertRaises(resource_loader.I18nFileLoadError, resource_loader.load_resource, "foo.bar")
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
| Add test for nonexisting file extensions. | Add test for nonexisting file extensions.
| Python | mit | tuvistavie/python-i18n | import unittest
import i18n
class TestFileLoader(unittest.TestCase):
def test_dummy(self):
self.assertTrue(hasattr(i18n, 'resource_loader'))
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
Add test for nonexisting file extensions. | import unittest
from i18n import resource_loader
class TestFileLoader(unittest.TestCase):
def test_nonexisting_extension(self):
self.assertRaises(resource_loader.I18nFileLoadError, resource_loader.load_resource, "foo.bar")
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
| <commit_before>import unittest
import i18n
class TestFileLoader(unittest.TestCase):
def test_dummy(self):
self.assertTrue(hasattr(i18n, 'resource_loader'))
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Add test for nonexisting file extensions.<commit_after> | import unittest
from i18n import resource_loader
class TestFileLoader(unittest.TestCase):
def test_nonexisting_extension(self):
self.assertRaises(resource_loader.I18nFileLoadError, resource_loader.load_resource, "foo.bar")
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
| import unittest
import i18n
class TestFileLoader(unittest.TestCase):
def test_dummy(self):
self.assertTrue(hasattr(i18n, 'resource_loader'))
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
Add test for nonexisting file extensions.import unittest
from i18n import resource_loader
class TestFileLoader(unittest.TestCase):
def test_nonexisting_extension(self):
self.assertRaises(resource_loader.I18nFileLoadError, resource_loader.load_resource, "foo.bar")
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
| <commit_before>import unittest
import i18n
class TestFileLoader(unittest.TestCase):
def test_dummy(self):
self.assertTrue(hasattr(i18n, 'resource_loader'))
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Add test for nonexisting file extensions.<commit_after>import unittest
from i18n import resource_loader
class TestFileLoader(unittest.TestCase):
def test_nonexisting_extension(self):
self.assertRaises(resource_loader.I18nFileLoadError, resource_loader.load_resource, "foo.bar")
suite = unittest.TestLoader().loadTestsFromTestCase(TestFileLoader)
unittest.TextTestRunner(verbosity=2).run(suite)
|
d9b46a4d06bf6832aa5dbb394ae97325e0578400 | survey/tests/test_default_settings.py | survey/tests/test_default_settings.py | from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
| from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
| Add - Test for setting ROOT | Add - Test for setting ROOT
| Python | agpl-3.0 | Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey | from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
Add - Test for setting ROOT | from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
| <commit_before>from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
<commit_msg>Add - Test for setting ROOT<commit_after> | from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
| from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
Add - Test for setting ROOTfrom survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
| <commit_before>from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from django.test import tag
from survey import set_default_settings
@tag("set")
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
with self.assertRaises(AttributeError):
self.client.get(url)
set_default_settings()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
<commit_msg>Add - Test for setting ROOT<commit_after>from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
from survey.exporter.tex.survey2tex import Survey2Tex
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
def test_set_root(self):
del settings.ROOT
set_default_settings()
try:
Survey2Tex.generate(self, "/")
except AttributeError:
self.fail("AttributeError: survey failed to set ROOT")
|
b4b905333f8847be730f30fbc53ac7a172195cdc | src/sentry/api/endpoints/group_events.py | src/sentry/api/endpoints/group_events.py | from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
| from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
| Add query param to event list | Add query param to event list
| Python | bsd-3-clause | looker/sentry,BuildingLink/sentry,mvaled/sentry,fotinakis/sentry,zenefits/sentry,BuildingLink/sentry,gencer/sentry,gencer/sentry,JamesMura/sentry,zenefits/sentry,alexm92/sentry,ifduyue/sentry,BuildingLink/sentry,JamesMura/sentry,JackDanger/sentry,gencer/sentry,fotinakis/sentry,nicholasserra/sentry,JackDanger/sentry,mvaled/sentry,jean/sentry,mvaled/sentry,beeftornado/sentry,beeftornado/sentry,ifduyue/sentry,nicholasserra/sentry,jean/sentry,daevaorn/sentry,ifduyue/sentry,fotinakis/sentry,mitsuhiko/sentry,zenefits/sentry,looker/sentry,ifduyue/sentry,jean/sentry,alexm92/sentry,looker/sentry,BuildingLink/sentry,jean/sentry,alexm92/sentry,daevaorn/sentry,mitsuhiko/sentry,JamesMura/sentry,daevaorn/sentry,gencer/sentry,nicholasserra/sentry,daevaorn/sentry,ifduyue/sentry,looker/sentry,zenefits/sentry,JackDanger/sentry,mvaled/sentry,looker/sentry,JamesMura/sentry,mvaled/sentry,fotinakis/sentry,beeftornado/sentry,BuildingLink/sentry,gencer/sentry,jean/sentry,mvaled/sentry,JamesMura/sentry,zenefits/sentry | from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
Add query param to event list | from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
| <commit_before>from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
<commit_msg>Add query param to event list<commit_after> | from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
| from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
Add query param to event listfrom __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
| <commit_before>from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
<commit_msg>Add query param to event list<commit_after>from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
4842b266bb1649477f41e0b914fc16be94696c01 | handlers/websockets.py | handlers/websockets.py | from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
from pocs.utils.logger import get_logger
clients = []
class PanWebSocket(WebSocketHandler):
logger = get_logger(self)
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
self.logger.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
self.logger.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
self.logger.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
self.logger.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
self.logger.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
self.logger.info("WS Closed")
| import logging
from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
clients = []
class PanWebSocket(WebSocketHandler):
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
logging.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
logging.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
logging.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
logging.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
logging.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
logging.info("WS Closed")
| Clean up the logging ofr PAWS | Clean up the logging ofr PAWS
| Python | mit | panoptes/PAWS,panoptes/PAWS,panoptes/PAWS,panoptes/PAWS | from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
from pocs.utils.logger import get_logger
clients = []
class PanWebSocket(WebSocketHandler):
logger = get_logger(self)
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
self.logger.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
self.logger.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
self.logger.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
self.logger.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
self.logger.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
self.logger.info("WS Closed")
Clean up the logging ofr PAWS | import logging
from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
clients = []
class PanWebSocket(WebSocketHandler):
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
logging.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
logging.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
logging.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
logging.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
logging.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
logging.info("WS Closed")
| <commit_before>from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
from pocs.utils.logger import get_logger
clients = []
class PanWebSocket(WebSocketHandler):
logger = get_logger(self)
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
self.logger.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
self.logger.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
self.logger.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
self.logger.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
self.logger.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
self.logger.info("WS Closed")
<commit_msg>Clean up the logging ofr PAWS<commit_after> | import logging
from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
clients = []
class PanWebSocket(WebSocketHandler):
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
logging.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
logging.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
logging.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
logging.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
logging.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
logging.info("WS Closed")
| from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
from pocs.utils.logger import get_logger
clients = []
class PanWebSocket(WebSocketHandler):
logger = get_logger(self)
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
self.logger.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
self.logger.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
self.logger.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
self.logger.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
self.logger.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
self.logger.info("WS Closed")
Clean up the logging ofr PAWSimport logging
from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
clients = []
class PanWebSocket(WebSocketHandler):
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
logging.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
logging.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
logging.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
logging.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
logging.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
logging.info("WS Closed")
| <commit_before>from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
from pocs.utils.logger import get_logger
clients = []
class PanWebSocket(WebSocketHandler):
logger = get_logger(self)
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
self.logger.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
self.logger.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
self.logger.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
self.logger.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
self.logger.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
self.logger.info("WS Closed")
<commit_msg>Clean up the logging ofr PAWS<commit_after>import logging
from tornado.websocket import WebSocketHandler
from zmq.eventloop.zmqstream import ZMQStream
clients = []
class PanWebSocket(WebSocketHandler):
def open(self, channel):
""" Client opening connection to unit """
if channel is None:
channel = self.settings['name']
logging.info("Setting up subscriber for channel: {}".format(channel))
try:
self.stream = ZMQStream(self.settings['msg_subscriber'].subscriber)
# Register the callback
self.stream.on_recv(self.on_data)
logging.info("WS opened for channel {}".format(channel))
# Add this client to our list
clients.append(self)
except Exception as e:
logging.warning("Problem establishing websocket for {}: {}".format(self, e))
def on_data(self, data):
""" From the PANOPTES unit """
msg = data[0].decode('UTF-8')
logging.debug("WS Received: {}".format(msg))
for client in clients:
client.write_message(msg)
def on_message(self, message):
""" From the client """
logging.info("WS Sent: {}".format(message))
# cmd_publisher = self.settings['cmd_publisher']
# try:
# cmd_publisher.send_message('PAWS', message)
# except Exception as e:
# print("Problem sending message from PAWS", e)
def on_close(self):
""" When client closes """
clients.remove(self)
logging.info("WS Closed")
|
6f5be9af15898f089c3ee83ca1f05fbd4570fcfa | src/cms/apps/news/models.py | src/cms/apps/news/models.py | """Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
| """Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
| Set unique together on news article. | Set unique together on news article. | Python | bsd-3-clause | lewiscollard/cms,etianen/cms,etianen/cms,danielsamuels/cms,jamesfoley/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,dan-gamble/cms,jamesfoley/cms,danielsamuels/cms,etianen/cms,danielsamuels/cms | """Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
Set unique together on news article. | """Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
| <commit_before>"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
<commit_msg>Set unique together on news article.<commit_after> | """Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
| """Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
Set unique together on news article."""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
| <commit_before>"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
<commit_msg>Set unique together on news article.<commit_after>"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
|
824ead425a80feeb7dc1fbd6505cf50c6e2ffd90 | ui_extensions/playground/views.py | ui_extensions/playground/views.py | from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
# We want this tab to display for all resource handlers, so we set the model
# to the ResourceHandler object. If we wanted to target a specific resource
# handler, we could get more specific, e.g. AWSHandler
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html') | from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
# We want this tab to display for all resource handlers, so we return
# True regardless of what Resource Handler is being displayed.
# If the goal is to target a specific Resource
# Handler, say AWSHandler, the body of this method would be:
# return if isinstance(self.instance.cast(), AWSHandler) else False
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html') | Add more info to TabDelegate example | Add more info to TabDelegate example
Provide details on how to limit a tab delegate to a specific resource handler.
| Python | apache-2.0 | CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge | from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
# We want this tab to display for all resource handlers, so we set the model
# to the ResourceHandler object. If we wanted to target a specific resource
# handler, we could get more specific, e.g. AWSHandler
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html')Add more info to TabDelegate example
Provide details on how to limit a tab delegate to a specific resource handler. | from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
# We want this tab to display for all resource handlers, so we return
# True regardless of what Resource Handler is being displayed.
# If the goal is to target a specific Resource
# Handler, say AWSHandler, the body of this method would be:
# return if isinstance(self.instance.cast(), AWSHandler) else False
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html') | <commit_before>from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
# We want this tab to display for all resource handlers, so we set the model
# to the ResourceHandler object. If we wanted to target a specific resource
# handler, we could get more specific, e.g. AWSHandler
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html')<commit_msg>Add more info to TabDelegate example
Provide details on how to limit a tab delegate to a specific resource handler.<commit_after> | from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
# We want this tab to display for all resource handlers, so we return
# True regardless of what Resource Handler is being displayed.
# If the goal is to target a specific Resource
# Handler, say AWSHandler, the body of this method would be:
# return if isinstance(self.instance.cast(), AWSHandler) else False
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html') | from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
# We want this tab to display for all resource handlers, so we set the model
# to the ResourceHandler object. If we wanted to target a specific resource
# handler, we could get more specific, e.g. AWSHandler
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html')Add more info to TabDelegate example
Provide details on how to limit a tab delegate to a specific resource handler.from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
# We want this tab to display for all resource handlers, so we return
# True regardless of what Resource Handler is being displayed.
# If the goal is to target a specific Resource
# Handler, say AWSHandler, the body of this method would be:
# return if isinstance(self.instance.cast(), AWSHandler) else False
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html') | <commit_before>from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
# We want this tab to display for all resource handlers, so we set the model
# to the ResourceHandler object. If we wanted to target a specific resource
# handler, we could get more specific, e.g. AWSHandler
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html')<commit_msg>Add more info to TabDelegate example
Provide details on how to limit a tab delegate to a specific resource handler.<commit_after>from django.shortcuts import render
from extensions.views import admin_extension, tab_extension, \
TabExtensionDelegate, dashboard_extension
from resourcehandlers.models import ResourceHandler
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)
class ResourceHandlerTabDelegate(TabExtensionDelegate):
# We want this tab to display for all resource handlers, so we return
# True regardless of what Resource Handler is being displayed.
# If the goal is to target a specific Resource
# Handler, say AWSHandler, the body of this method would be:
# return if isinstance(self.instance.cast(), AWSHandler) else False
def should_display(self):
return True
@admin_extension(
title="Playground Admin",
description="Entrypoint for Playground Admin Extension")
def show_admin_extension(request, **kwargs):
return render(request, template_name='playground/templates/admin.html')
@tab_extension(
model=ResourceHandler,
title="Playground",
description="Entrypoint for Playground Resource Handler Tab Extension",
delegate=ResourceHandlerTabDelegate
)
def show_rh_tab_extension(request, model_id, **kwargs):
return render(request, template_name='playground/templates/tab.html')
@dashboard_extension(
title="Playground",
description='Playground widget')
def show_playground_widget(request):
return render(request, template_name='playground/templates/widget.html') |
b496353400718ef822b686c2e8d452e0df305aa8 | ansible/roles/jenkins/files/scripts/get_siteconfig.py | ansible/roles/jenkins/files/scripts/get_siteconfig.py | import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
print sname['src']
exit(0)
exit(1)
| # Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
# Change 1: strip off leading git+. That prefix is added for ansible_galaxy
# but not supported by Jenkins' Git plugin
# per @semifocused
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
src = sname['src']
if src.startswith("git+"):
src = src[4:]
print src
exit(0)
exit(1)
| Remove repo URL prefix of git+ | Remove repo URL prefix of git+
| Python | apache-2.0 | 47lining/nucleator-core-builder,47lining/nucleator-core-builder,47lining/nucleator-core-builder | import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
print sname['src']
exit(0)
exit(1)
Remove repo URL prefix of git+ | # Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
# Change 1: strip off leading git+. That prefix is added for ansible_galaxy
# but not supported by Jenkins' Git plugin
# per @semifocused
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
src = sname['src']
if src.startswith("git+"):
src = src[4:]
print src
exit(0)
exit(1)
| <commit_before>import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
print sname['src']
exit(0)
exit(1)
<commit_msg>Remove repo URL prefix of git+<commit_after> | # Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
# Change 1: strip off leading git+. That prefix is added for ansible_galaxy
# but not supported by Jenkins' Git plugin
# per @semifocused
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
src = sname['src']
if src.startswith("git+"):
src = src[4:]
print src
exit(0)
exit(1)
| import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
print sname['src']
exit(0)
exit(1)
Remove repo URL prefix of git+# Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
# Change 1: strip off leading git+. That prefix is added for ansible_galaxy
# but not supported by Jenkins' Git plugin
# per @semifocused
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
src = sname['src']
if src.startswith("git+"):
src = src[4:]
print src
exit(0)
exit(1)
| <commit_before>import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
print sname['src']
exit(0)
exit(1)
<commit_msg>Remove repo URL prefix of git+<commit_after># Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, yaml
from nucleator.cli import properties
# Read sources.yml
# find the src= for name: siteconfig
# TODO handle version?
# Change 1: strip off leading git+. That prefix is added for ansible_galaxy
# but not supported by Jenkins' Git plugin
# per @semifocused
import yaml
sources = os.path.join(properties.NUCLEATOR_CONFIG_DIR, "sources.yml")
stream = open(sources, 'r')
slist = yaml.load(stream)
for sname in slist:
if sname['name'] == "siteconfig":
src = sname['src']
if src.startswith("git+"):
src = src[4:]
print src
exit(0)
exit(1)
|
1f2ad74d0ba33de7a964c5d675493434bd6fde74 | days/apps/days/models.py | days/apps/days/models.py | """Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
| """Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
# How to perform lookups on this field: https://docs.djangoproject.com/en/1.9/ref/models/querysets/#month
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
| Document info for performing lookups on date fields | Document info for performing lookups on date fields
| Python | mit | rlucioni/days | """Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
Document info for performing lookups on date fields | """Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
# How to perform lookups on this field: https://docs.djangoproject.com/en/1.9/ref/models/querysets/#month
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
| <commit_before>"""Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
<commit_msg>Document info for performing lookups on date fields<commit_after> | """Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
# How to perform lookups on this field: https://docs.djangoproject.com/en/1.9/ref/models/querysets/#month
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
| """Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
Document info for performing lookups on date fields"""Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
# How to perform lookups on this field: https://docs.djangoproject.com/en/1.9/ref/models/querysets/#month
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
| <commit_before>"""Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
<commit_msg>Document info for performing lookups on date fields<commit_after>"""Models for the days app."""
from django.db import models
class Event(models.Model):
"""Representation of a notable historical event."""
# How to perform lookups on this field: https://docs.djangoproject.com/en/1.9/ref/models/querysets/#month
date = models.DateField(
help_text='When the event occurred.'
)
description = models.TextField(
help_text='A description of the event.'
)
class Meta(object): # pylint: disable=missing-docstring
ordering = ['date']
def __str__(self):
return self.date.strftime('%Y-%-m-%-d') # pylint: disable=no-member
|
25cebf23c84d8e1136a3e2b503e574aa1c7263e6 | dbaas_zabbix/dbaas_api.py | dbaas_zabbix/dbaas_api.py | # -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
| # -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
| Add databaseinfra get engine name | Add databaseinfra get engine name
| Python | bsd-3-clause | globocom/dbaas-zabbix,globocom/dbaas-zabbix | # -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
Add databaseinfra get engine name | # -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
| <commit_before># -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
<commit_msg>Add databaseinfra get engine name<commit_after> | # -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
| # -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
Add databaseinfra get engine name# -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
| <commit_before># -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
<commit_msg>Add databaseinfra get engine name<commit_after># -*- coding: utf-8 -*-
class DatabaseAsAServiceApi(object):
def __init__(self, databaseinfra):
self.databaseinfra = databaseinfra
self.driver = self.get_databaseinfra_driver()
self.database_instances = self.get_database_instances()
def get_all_instances(self, ):
return self.databaseinfra.instances.all()
def get_databaseinfra_driver(self):
return self.databaseinfra.get_driver()
def get_database_instances(self):
return self.driver.get_database_instances()
def get_non_database_instances(self,):
return self.driver.get_non_database_instances()
def get_hosts(self,):
instances = self.get_all_instances()
return list(set([instance.hostname for instance in instances]))
def get_environment(self):
return self.databaseinfra.environment
def get_databaseifra_name(self):
return self.databaseinfra.name
def get_databaseinfra_secondary_ips(self):
return self.databaseinfra.cs_dbinfra_attributes.all()
def get_databaseinfra_availability(self):
return self.databaseinfra.plan.is_ha
def get_databaseinfra_engine_name(self):
return self.databaseinfra.engine.engine_type.name
|
ab9a38793645a9c61cf1c320e5a4db9bf7b03ccf | grow/deployments/utils.py | grow/deployments/utils.py | from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
message.branch = repo.head.ref.name
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
| from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
try:
message.branch = repo.head.ref.name
except TypeError:
# Allow operating in an environment with a detached HEAD.
pass
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
| Allow operating in an environment with a detached HEAD. | Allow operating in an environment with a detached HEAD.
| Python | mit | grow/pygrow,denmojo/pygrow,grow/grow,grow/grow,grow/pygrow,codedcolors/pygrow,grow/grow,grow/pygrow,denmojo/pygrow,denmojo/pygrow,denmojo/pygrow,codedcolors/pygrow,codedcolors/pygrow,grow/grow | from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
message.branch = repo.head.ref.name
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
Allow operating in an environment with a detached HEAD. | from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
try:
message.branch = repo.head.ref.name
except TypeError:
# Allow operating in an environment with a detached HEAD.
pass
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
| <commit_before>from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
message.branch = repo.head.ref.name
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
<commit_msg>Allow operating in an environment with a detached HEAD.<commit_after> | from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
try:
message.branch = repo.head.ref.name
except TypeError:
# Allow operating in an environment with a detached HEAD.
pass
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
| from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
message.branch = repo.head.ref.name
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
Allow operating in an environment with a detached HEAD.from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
try:
message.branch = repo.head.ref.name
except TypeError:
# Allow operating in an environment with a detached HEAD.
pass
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
| <commit_before>from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
message.branch = repo.head.ref.name
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
<commit_msg>Allow operating in an environment with a detached HEAD.<commit_after>from .indexes import messages
import git
class Error(Exception):
pass
class NoGitHeadError(Error, ValueError):
pass
def create_commit_message(repo):
message = messages.CommitMessage()
try:
commit = repo.head.commit
except ValueError:
raise NoGitHeadError('On initial commit, no HEAD yet.')
try:
repo.git.diff('--quiet')
has_unstaged_changes = False
except git.exc.GitCommandError:
has_unstaged_changes = True
message.has_unstaged_changes = has_unstaged_changes
message.sha = commit.hexsha
message.message = commit.message
try:
message.branch = repo.head.ref.name
except TypeError:
# Allow operating in an environment with a detached HEAD.
pass
message.author = messages.AuthorMessage(
name=commit.author.name, email=commit.author.email)
return message
|
2b39c89e86ca00ca6bbca88d68e1bccf9c94efd4 | grab/spider/decorators.py | grab/spider/decorators.py | import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
| import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
except Exception as ex:
raise
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
| Fix exception handling in integrity decorator | Fix exception handling in integrity decorator
| Python | mit | SpaceAppsXploration/grab,alihalabyah/grab,giserh/grab,raybuhr/grab,SpaceAppsXploration/grab,maurobaraldi/grab,lorien/grab,DDShadoww/grab,pombredanne/grab-1,maurobaraldi/grab,lorien/grab,raybuhr/grab,DDShadoww/grab,huiyi1990/grab,liorvh/grab,shaunstanislaus/grab,istinspring/grab,kevinlondon/grab,pombredanne/grab-1,huiyi1990/grab,istinspring/grab,shaunstanislaus/grab,codevlabs/grab,alihalabyah/grab,giserh/grab,kevinlondon/grab,liorvh/grab,codevlabs/grab | import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
Fix exception handling in integrity decorator | import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
except Exception as ex:
raise
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
| <commit_before>import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
<commit_msg>Fix exception handling in integrity decorator<commit_after> | import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
except Exception as ex:
raise
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
| import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
Fix exception handling in integrity decoratorimport functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
except Exception as ex:
raise
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
| <commit_before>import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
<commit_msg>Fix exception handling in integrity decorator<commit_after>import functools
import logging
from weblib.error import ResponseNotValid
def integrity(integrity_func, integrity_errors=(ResponseNotValid,),
ignore_errors=()):
"""
Args:
:param integrity_func: couldb callable or string contains name of
method to call
"""
def build_decorator(func):
@functools.wraps(func)
def func_wrapper(self, grab, task):
if isinstance(integrity_func, (list, tuple)):
int_funcs = integrity_func
else:
int_funcs = [integrity_func]
try:
for int_func in int_funcs:
if isinstance(int_func, str):
getattr(self, int_func)(grab)
else:
int_func(grab)
except ignore_errors as ex:
self.stat.inc(ex.__class__.__name__)
grab.meta['integrity_error'] = ex
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
except integrity_errors as ex:
yield task.clone(refresh_cache=True)
self.stat.inc(ex.__class__.__name__)
#logging.error(ex)
except Exception as ex:
raise
else:
grab.meta['integrity_error'] = None
result = func(self, grab, task)
if result is not None:
for event in result:
yield event
return func_wrapper
return build_decorator
|
257b186eb64638d6638be93633d4db02ce14d390 | docker_log_es/storage.py | docker_log_es/storage.py | #!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://127.0.0.1:9200')
http = AsyncHTTPClient()
| #!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://elasticsearch:9200')
http = AsyncHTTPClient()
| Connect to the "elasticsearch" host by default | Connect to the "elasticsearch" host by default
| Python | mit | ei-grad/docker-log-es | #!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://127.0.0.1:9200')
http = AsyncHTTPClient()
Connect to the "elasticsearch" host by default | #!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://elasticsearch:9200')
http = AsyncHTTPClient()
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://127.0.0.1:9200')
http = AsyncHTTPClient()
<commit_msg>Connect to the "elasticsearch" host by default<commit_after> | #!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://elasticsearch:9200')
http = AsyncHTTPClient()
| #!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://127.0.0.1:9200')
http = AsyncHTTPClient()
Connect to the "elasticsearch" host by default#!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://elasticsearch:9200')
http = AsyncHTTPClient()
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://127.0.0.1:9200')
http = AsyncHTTPClient()
<commit_msg>Connect to the "elasticsearch" host by default<commit_after>#!/usr/bin/env python
# encoding: utf-8
import socket
from os import environ as env
from tornado.netutil import Resolver
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
class UnixResolver(Resolver):
def initialize(self, resolver):
self.resolver = resolver
def close(self):
self.resolver.close()
@gen.coroutine
def resolve(self, host, port, *args, **kwargs):
scheme, path = Storage.DOCKER.split('://')
if host == 'docker':
if scheme == 'unix':
raise gen.Return([(socket.AF_UNIX, path)])
elif scheme == 'tcp' or scheme == 'http':
t = path.split(":")
if len(t) > 1:
host, port = t
port = int(port)
else:
host, port = t[0], 80
result = yield self.resolver.resolve(host, port, *args, **kwargs)
raise gen.Return(result)
AsyncHTTPClient.configure(
None,
resolver=UnixResolver(resolver=Resolver()),
max_clients=20000
)
class Storage(object):
CONTAINERS = set([])
DOCKER = env.get('DOCKER_HOST', 'unix:///var/run/docker.sock')
ELASTICSEARCH = env.get('ELASTICSEARCH', 'http://elasticsearch:9200')
http = AsyncHTTPClient()
|
27a944d5fc74972a90e8dd69879ebc27c4412b99 | test/python_api/default-constructor/sb_frame.py | test/python_api/default-constructor/sb_frame.py | """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.GetDescription(lldb.SBStream())
obj.Clear()
| """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
obj.GetDescription(lldb.SBStream())
obj.Clear()
| Add FindValue() and WatchValue() fuzz calls to the mix. | Add FindValue() and WatchValue() fuzz calls to the mix.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb | """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.GetDescription(lldb.SBStream())
obj.Clear()
Add FindValue() and WatchValue() fuzz calls to the mix.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8 | """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
obj.GetDescription(lldb.SBStream())
obj.Clear()
| <commit_before>"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.GetDescription(lldb.SBStream())
obj.Clear()
<commit_msg>Add FindValue() and WatchValue() fuzz calls to the mix.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after> | """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
obj.GetDescription(lldb.SBStream())
obj.Clear()
| """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.GetDescription(lldb.SBStream())
obj.Clear()
Add FindValue() and WatchValue() fuzz calls to the mix.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
obj.GetDescription(lldb.SBStream())
obj.Clear()
| <commit_before>"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.GetDescription(lldb.SBStream())
obj.Clear()
<commit_msg>Add FindValue() and WatchValue() fuzz calls to the mix.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
5ff559f386957844f32d6f96987bcece5c9a43cc | webserver/profiles/templatetags/profile_tags.py | webserver/profiles/templatetags/profile_tags.py | """
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png'})
url = "http://www.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
| """
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png',
's': 200})
url = "https://secure.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
| Use secure gravatar and fix gravatar image size | Use secure gravatar and fix gravatar image size
Fixes #106
Fixes #112
| Python | bsd-3-clause | siggame/webserver,siggame/webserver,siggame/webserver | """
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png'})
url = "http://www.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
Use secure gravatar and fix gravatar image size
Fixes #106
Fixes #112 | """
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png',
's': 200})
url = "https://secure.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
| <commit_before>"""
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png'})
url = "http://www.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
<commit_msg>Use secure gravatar and fix gravatar image size
Fixes #106
Fixes #112<commit_after> | """
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png',
's': 200})
url = "https://secure.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
| """
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png'})
url = "http://www.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
Use secure gravatar and fix gravatar image size
Fixes #106
Fixes #112"""
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png',
's': 200})
url = "https://secure.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
| <commit_before>"""
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png'})
url = "http://www.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
<commit_msg>Use secure gravatar and fix gravatar image size
Fixes #106
Fixes #112<commit_after>"""
gravatar_url from https://en.gravatar.com/site/implement/images/django/
"""
from django import template
from django.conf import settings
import urllib
import hashlib
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email):
self.email = template.Variable(email)
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
email_hash = hashlib.md5(email.lower()).hexdigest()
query_str = urllib.urlencode({'d': 'megaminerai.com/static/img/default_profile_image.png',
's': 200})
url = "https://secure.gravatar.com/avatar/{0}?{1}"
return url.format(email_hash, query_str)
@register.tag
def gravatar_url(parser, token):
try:
tag_name, email = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError, "%r tag requires a single argument" % token.contents.split()[0]
return GravatarUrlNode(email)
|
ff445030337b087513114f327b05e89fdfc7d31d | test_sempai.py | test_sempai.py | import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'test_sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
shutil.rmtree(self.direc)
def test_import(self):
import test_sempai
| import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
sys.path.remove(self.direc)
shutil.rmtree(self.direc)
def test_import(self):
import sempai
def test_access(self):
import sempai
assert sempai.three == 3
def test_location(self):
import sempai
assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
| Add a couple more tests | Add a couple more tests
| Python | mit | kragniz/json-sempai | import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'test_sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
shutil.rmtree(self.direc)
def test_import(self):
import test_sempai
Add a couple more tests | import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
sys.path.remove(self.direc)
shutil.rmtree(self.direc)
def test_import(self):
import sempai
def test_access(self):
import sempai
assert sempai.three == 3
def test_location(self):
import sempai
assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
| <commit_before>import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'test_sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
shutil.rmtree(self.direc)
def test_import(self):
import test_sempai
<commit_msg>Add a couple more tests<commit_after> | import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
sys.path.remove(self.direc)
shutil.rmtree(self.direc)
def test_import(self):
import sempai
def test_access(self):
import sempai
assert sempai.three == 3
def test_location(self):
import sempai
assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
| import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'test_sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
shutil.rmtree(self.direc)
def test_import(self):
import test_sempai
Add a couple more testsimport jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
sys.path.remove(self.direc)
shutil.rmtree(self.direc)
def test_import(self):
import sempai
def test_access(self):
import sempai
assert sempai.three == 3
def test_location(self):
import sempai
assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
| <commit_before>import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'test_sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
shutil.rmtree(self.direc)
def test_import(self):
import test_sempai
<commit_msg>Add a couple more tests<commit_after>import jsonsempai
import os
import shutil
import sys
import tempfile
TEST_FILE = '''{
"three": 3
}'''
class TestSempai(object):
def setup(self):
self.direc = tempfile.mkdtemp(prefix='jsonsempai')
sys.path.append(self.direc)
with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
f.write(TEST_FILE)
def teardown(self):
sys.path.remove(self.direc)
shutil.rmtree(self.direc)
def test_import(self):
import sempai
def test_access(self):
import sempai
assert sempai.three == 3
def test_location(self):
import sempai
assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
|
d594747d7f5027b6994d98eaa17ed59d6dcb40de | tests/model/test_pwave_classifiers.py | tests/model/test_pwave_classifiers.py | from unittest import TestCase
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
| from unittest import TestCase
import os
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
path = os.path.dirname(__file__)
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
| Fix test path to file dir to be able to load classifier data | Fix test path to file dir to be able to load classifier data
| Python | agpl-3.0 | citiususc/construe,citiususc/construe,citiususc/construe | from unittest import TestCase
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
Fix test path to file dir to be able to load classifier data | from unittest import TestCase
import os
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
path = os.path.dirname(__file__)
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
| <commit_before>from unittest import TestCase
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
<commit_msg>Fix test path to file dir to be able to load classifier data<commit_after> | from unittest import TestCase
import os
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
path = os.path.dirname(__file__)
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
| from unittest import TestCase
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
Fix test path to file dir to be able to load classifier datafrom unittest import TestCase
import os
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
path = os.path.dirname(__file__)
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
| <commit_before>from unittest import TestCase
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
<commit_msg>Fix test path to file dir to be able to load classifier data<commit_after>from unittest import TestCase
import os
import numpy as np
from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier
path = os.path.dirname(__file__)
class TestClassifier(TestCase):
def test_classifier(self):
limb = classifier[0]
prec = classifier[1]
X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1)
X_test, Y_test = X_test[:, 0:8], X_test[:, 8:]
d1 = limb.decision_function(X_test)
d2 = prec.decision_function(X_test)
d = np.column_stack((d1, d2))
np.testing.assert_almost_equal(d, Y_test)
|
183234c5b66d12ffa3e57b042dc06a34e60084d2 | controllers/main.py | controllers/main.py | # -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].apply_coupon(order, promo_code)
return coupon_status
| # -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].sudo().apply_coupon(order, promo_code)
return coupon_status
| Allow public users to use coupons on website | [FIX] website_sale_coupon: Allow public users to use coupons on website
| Python | agpl-3.0 | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo | # -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].apply_coupon(order, promo_code)
return coupon_status
[FIX] website_sale_coupon: Allow public users to use coupons on website | # -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].sudo().apply_coupon(order, promo_code)
return coupon_status
| <commit_before># -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].apply_coupon(order, promo_code)
return coupon_status
<commit_msg>[FIX] website_sale_coupon: Allow public users to use coupons on website<commit_after> | # -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].sudo().apply_coupon(order, promo_code)
return coupon_status
| # -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].apply_coupon(order, promo_code)
return coupon_status
[FIX] website_sale_coupon: Allow public users to use coupons on website# -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].sudo().apply_coupon(order, promo_code)
return coupon_status
| <commit_before># -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].apply_coupon(order, promo_code)
return coupon_status
<commit_msg>[FIX] website_sale_coupon: Allow public users to use coupons on website<commit_after># -*- coding: utf-8 -*-
from odoo import http
from odoo.http import request
class Website_coupon(http.Controller):
@http.route(['/shop/apply_coupon'], type='json', auth="public", website=True)
def shop_apply_coupon(self, promo_code, **post):
order = request.website.sale_get_order()
coupon_status = request.env['sale.coupon.apply.code'].sudo().apply_coupon(order, promo_code)
return coupon_status
|
c9940a91dd78eb2215559f02b356e15a89fcea28 | indra/tests/test_eidos.py | indra/tests/test_eidos.py | import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
| import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
assert(stmt.evidence[0].annotations['found_by'] == \
'causeEffect_ported_syntax_1_verb-${addlabel}')
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
assert(stmt.evidence[0].annotations['found_by'] == \
'ported_syntax_1_verb-Causal')
| Add tests for eidos found_by annotation | Add tests for eidos found_by annotation
| Python | bsd-2-clause | johnbachman/indra,johnbachman/belpy,pvtodorov/indra,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,bgyori/indra,bgyori/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/indra | import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
Add tests for eidos found_by annotation | import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
assert(stmt.evidence[0].annotations['found_by'] == \
'causeEffect_ported_syntax_1_verb-${addlabel}')
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
assert(stmt.evidence[0].annotations['found_by'] == \
'ported_syntax_1_verb-Causal')
| <commit_before>import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
<commit_msg>Add tests for eidos found_by annotation<commit_after> | import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
assert(stmt.evidence[0].annotations['found_by'] == \
'causeEffect_ported_syntax_1_verb-${addlabel}')
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
assert(stmt.evidence[0].annotations['found_by'] == \
'ported_syntax_1_verb-Causal')
| import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
Add tests for eidos found_by annotationimport os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
assert(stmt.evidence[0].annotations['found_by'] == \
'causeEffect_ported_syntax_1_verb-${addlabel}')
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
assert(stmt.evidence[0].annotations['found_by'] == \
'ported_syntax_1_verb-Causal')
| <commit_before>import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
<commit_msg>Add tests for eidos found_by annotation<commit_after>import os
from indra.sources import eidos
from indra.statements import Influence
path_this = os.path.dirname(os.path.abspath(__file__))
test_json = os.path.join(path_this, 'eidos_test.json')
def test_process_json():
ep = eidos.process_json_file(test_json)
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj_delta.get('polarity') == 1
assert stmt.obj_delta.get('polarity') == -1
assert stmt.subj_delta.get('adjectives') == ['large']
assert stmt.obj_delta.get('adjectives') == ['seriously']
assert(stmt.evidence[0].annotations['found_by'] == \
'causeEffect_ported_syntax_1_verb-${addlabel}')
print(stmt)
def test_process_text():
ep = eidos.process_text('The cost of fuel decreases water trucking.')
assert ep is not None
assert len(ep.statements) == 1
stmt = ep.statements[0]
assert isinstance(stmt, Influence)
assert stmt.subj.name == 'cost of fuel'
assert stmt.obj.name == 'water trucking'
assert stmt.obj_delta.get('polarity') == -1
assert(stmt.evidence[0].annotations['found_by'] == \
'ported_syntax_1_verb-Causal')
|
6d83e409dcf56806b433c10012717b3551c69a4f | kobo/hub/decorators.py | kobo/hub/decorators.py | # -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
fqdn = socket.getfqdn(request.META["REMOTE_ADDR"])
prefix, hostname = request.user.username.split("/", 1)
if hostname != fqdn:
raise SuspiciousOperation("Worker's FQDN (%s) doesn't match username (%s)" % (fqdn, hostname))
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
| # -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
| Remove reverse DNS validation for Kobo worker | Remove reverse DNS validation for Kobo worker
| Python | lgpl-2.1 | release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo | # -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
fqdn = socket.getfqdn(request.META["REMOTE_ADDR"])
prefix, hostname = request.user.username.split("/", 1)
if hostname != fqdn:
raise SuspiciousOperation("Worker's FQDN (%s) doesn't match username (%s)" % (fqdn, hostname))
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
Remove reverse DNS validation for Kobo worker | # -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
| <commit_before># -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
fqdn = socket.getfqdn(request.META["REMOTE_ADDR"])
prefix, hostname = request.user.username.split("/", 1)
if hostname != fqdn:
raise SuspiciousOperation("Worker's FQDN (%s) doesn't match username (%s)" % (fqdn, hostname))
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
<commit_msg>Remove reverse DNS validation for Kobo worker<commit_after> | # -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
| # -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
fqdn = socket.getfqdn(request.META["REMOTE_ADDR"])
prefix, hostname = request.user.username.split("/", 1)
if hostname != fqdn:
raise SuspiciousOperation("Worker's FQDN (%s) doesn't match username (%s)" % (fqdn, hostname))
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
Remove reverse DNS validation for Kobo worker# -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
| <commit_before># -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
fqdn = socket.getfqdn(request.META["REMOTE_ADDR"])
prefix, hostname = request.user.username.split("/", 1)
if hostname != fqdn:
raise SuspiciousOperation("Worker's FQDN (%s) doesn't match username (%s)" % (fqdn, hostname))
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
<commit_msg>Remove reverse DNS validation for Kobo worker<commit_after># -*- coding: utf-8 -*-
import socket
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from kobo.decorators import decorator_with_args
from kobo.django.xmlrpc.decorators import *
def validate_worker(func):
def _new_func(request, *args, **kwargs):
if not request.user.is_authenticated():
raise PermissionDenied("Login required.")
if getattr(request, 'worker', None) is None:
raise SuspiciousOperation("User doesn't match any worker: %s" % request.user.username)
return func(request, *args, **kwargs)
_new_func.__name__ = func.__name__
_new_func.__doc__ = func.__doc__
_new_func.__dict__.update(func.__dict__)
return _new_func
|
3d8f50f39f76cbeb07136c75d6e65dc4132d7aa2 | hr_expense_sequence/models/hr_expense_expense.py | hr_expense_sequence/models/hr_expense_expense.py | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
| # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
@api.model
def account_move_get(self, expense_id):
'''Write expense number on account move'''
vals = super(HrExpense, self).account_move_get(expense_id)
expense = self.browse(expense_id)
vals['ref'] = expense.number
return vals
| Write expense number on account move | Write expense number on account move
| Python | agpl-3.0 | acsone/hr,Antiun/hr,raycarnes/hr,Vauxoo/hr,feketemihai/hr,alanljj/oca_hr,xpansa/hr,thinkopensolutions/hr,open-synergy/hr,yelizariev/hr,microcom/hr,microcom/hr,charbeljc/hr,open-synergy/hr,Endika/hr,iDTLabssl/hr,Endika/hr,hbrunn/hr,alanljj/oca_hr,VitalPet/hr,thinkopensolutions/hr,iDTLabssl/hr,rschnapka/hr,damdam-s/hr,Antiun/hr,acsone/hr,VitalPet/hr,Eficent/hr,rschnapka/hr,yelizariev/hr,charbeljc/hr,feketemihai/hr,abstract-open-solutions/hr,vrenaville/hr,raycarnes/hr,Vauxoo/hr,damdam-s/hr,abstract-open-solutions/hr,Eficent/hr,xpansa/hr,vrenaville/hr,hbrunn/hr | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
Write expense number on account move | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
@api.model
def account_move_get(self, expense_id):
'''Write expense number on account move'''
vals = super(HrExpense, self).account_move_get(expense_id)
expense = self.browse(expense_id)
vals['ref'] = expense.number
return vals
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
<commit_msg>Write expense number on account move<commit_after> | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
@api.model
def account_move_get(self, expense_id):
'''Write expense number on account move'''
vals = super(HrExpense, self).account_move_get(expense_id)
expense = self.browse(expense_id)
vals['ref'] = expense.number
return vals
| # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
Write expense number on account move# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
@api.model
def account_move_get(self, expense_id):
'''Write expense number on account move'''
vals = super(HrExpense, self).account_move_get(expense_id)
expense = self.browse(expense_id)
vals['ref'] = expense.number
return vals
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
<commit_msg>Write expense number on account move<commit_after># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class HrExpense(models.Model):
_inherit = 'hr.expense.expense'
number = fields.Char(required=True, default="/", readonly=True)
@api.model
def create(self, vals):
if vals.get('number', '/') == '/':
vals['number'] = self.env['ir.sequence'].get('hr.expense')
return super(HrExpense, self).create(vals)
@api.model
def account_move_get(self, expense_id):
'''Write expense number on account move'''
vals = super(HrExpense, self).account_move_get(expense_id)
expense = self.browse(expense_id)
vals['ref'] = expense.number
return vals
|
a22b0c562a332761fb1094ad72ae607810cccc49 | manager/apps/brand/urls.py | manager/apps/brand/urls.py | from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
| from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand/$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner/$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
| Fix Brand and Owner public links (missing / would get a 404 on brand/ and owner/) | Fix Brand and Owner public links (missing / would get a 404 on brand/ and owner/)
Also follow the same URL scheme as admin (which always have trailing
slash)
| Python | mit | okfn/brand-manager,okfn/opd-brand-manager,okfn/brand-manager,okfn/opd-brand-manager | from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
Fix Brand and Owner public links (missing / would get a 404 on brand/ and owner/)
Also follow the same URL scheme as admin (which always have trailing
slash) | from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand/$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner/$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
| <commit_before>from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
<commit_msg>Fix Brand and Owner public links (missing / would get a 404 on brand/ and owner/)
Also follow the same URL scheme as admin (which always have trailing
slash)<commit_after> | from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand/$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner/$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
| from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
Fix Brand and Owner public links (missing / would get a 404 on brand/ and owner/)
Also follow the same URL scheme as admin (which always have trailing
slash)from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand/$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner/$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
| <commit_before>from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
<commit_msg>Fix Brand and Owner public links (missing / would get a 404 on brand/ and owner/)
Also follow the same URL scheme as admin (which always have trailing
slash)<commit_after>from django.conf.urls import patterns, url
from manager.apps.brand.views import BrandListView, BrandView
from manager.apps.brand.views import OwnerListView, OwnerView
urlpatterns = patterns(
'',
url(r'^brand/$', BrandListView.as_view(), name='brandlist'),
url(r'^brand/(?P<bsin>[1-9A-NP-Z]{6})', BrandView.as_view(), name='brand'),
url(r'^owner/$', OwnerListView.as_view(), name='ownerlist'),
url(r'^owner/(?P<cd>[1-9]+)', OwnerView.as_view(), name='owner'),
)
|
cab50585aca7a25d52436ab5d7fd9f75f08a185b | epiphany/test/test_compiled_c.py | epiphany/test/test_compiled_c.py | from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
| from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
('fib.elf', 441),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
| Add fib.elf to integration tests. | Add fib.elf to integration tests.
| Python | bsd-3-clause | futurecore/revelation,moreati/revelation,moreati/revelation,futurecore/revelation,futurecore/revelation | from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
Add fib.elf to integration tests. | from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
('fib.elf', 441),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
| <commit_before>from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
<commit_msg>Add fib.elf to integration tests.<commit_after> | from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
('fib.elf', 441),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
| from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
Add fib.elf to integration tests.from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
('fib.elf', 441),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
| <commit_before>from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
<commit_msg>Add fib.elf to integration tests.<commit_after>from epiphany.sim import Epiphany
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'c')
@pytest.mark.parametrize("elf_file,expected", [('nothing.elf', 176),
('fib.elf', 441),
])
def test_compiled_c(elf_file, expected, capsys):
"""Test an ELF file that has been compiled from a C function.
This test checks that the correct number of instructions have been executed.
"""
elf_filename = os.path.join(elf_dir, elf_file)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_text = 'Instructions Executed = ' + str(expected)
assert expected_text in out
assert err == ''
assert not epiphany.state.running
|
a412166af39edd7a78a1127dba2ecb5c65986049 | feder/cases/factories.py | feder/cases/factories.py | from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
| from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
| Clean up import in CaseFactory | Clean up import in CaseFactory
| Python | mit | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
Clean up import in CaseFactory | from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
| <commit_before>from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
<commit_msg>Clean up import in CaseFactory<commit_after> | from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
| from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
Clean up import in CaseFactoryfrom feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
| <commit_before>from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
<commit_msg>Clean up import in CaseFactory<commit_after>from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
|
e96e39bc3b5c540dc2cdcee26c6562c358745f93 | citrination_client/base/tests/test_base_client.py | citrination_client/base/tests/test_base_client.py | from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
client = BaseClient("asdf", "mycitrinationsite")
ver = client.version()
print("Version:"+ver)
assert ver[0].isdigit()
| from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
from citrination_client import __version__
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
ver = __version__
print("Version:" + ver)
assert ver[0].isdigit()
| Update test to use new version location | Update test to use new version location
| Python | apache-2.0 | CitrineInformatics/python-citrination-client | from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
client = BaseClient("asdf", "mycitrinationsite")
ver = client.version()
print("Version:"+ver)
assert ver[0].isdigit()
Update test to use new version location | from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
from citrination_client import __version__
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
ver = __version__
print("Version:" + ver)
assert ver[0].isdigit()
| <commit_before>from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
client = BaseClient("asdf", "mycitrinationsite")
ver = client.version()
print("Version:"+ver)
assert ver[0].isdigit()
<commit_msg>Update test to use new version location<commit_after> | from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
from citrination_client import __version__
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
ver = __version__
print("Version:" + ver)
assert ver[0].isdigit()
| from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
client = BaseClient("asdf", "mycitrinationsite")
ver = client.version()
print("Version:"+ver)
assert ver[0].isdigit()
Update test to use new version locationfrom citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
from citrination_client import __version__
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
ver = __version__
print("Version:" + ver)
assert ver[0].isdigit()
| <commit_before>from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
client = BaseClient("asdf", "mycitrinationsite")
ver = client.version()
print("Version:"+ver)
assert ver[0].isdigit()
<commit_msg>Update test to use new version location<commit_after>from citrination_client.base import BaseClient
from citrination_client.base.errors import CitrinationClientError
from citrination_client import __version__
def test_none_api_key():
"""
Ensures that an error is thrown if a client is instantiated
without an API key
"""
try:
client = BaseClient(None, "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_zero_length_api_key():
"""
Tests that a zero length API key will cause the client to throw
an error on instantiation
"""
try:
client = BaseClient("", "mycitrinationsite")
assert False
except CitrinationClientError:
assert True
def test_version():
"""
Tests that the version is extracted
"""
ver = __version__
print("Version:" + ver)
assert ver[0].isdigit()
|
be315047f477377d19681063906480eb74f1e59f | mqtt_logger/serializers.py | mqtt_logger/serializers.py | """Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['time_recorded', 'topic', 'id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
| """Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
| Remove the topic and time from the pandas index so they are included in the json output again. | Remove the topic and time from the pandas index so they are included in the json output again.
| Python | mit | ast0815/mqtt-hub,ast0815/mqtt-hub | """Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['time_recorded', 'topic', 'id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
Remove the topic and time from the pandas index so they are included in the json output again. | """Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
| <commit_before>"""Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['time_recorded', 'topic', 'id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
<commit_msg>Remove the topic and time from the pandas index so they are included in the json output again.<commit_after> | """Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
| """Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['time_recorded', 'topic', 'id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
Remove the topic and time from the pandas index so they are included in the json output again."""Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
| <commit_before>"""Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['time_recorded', 'topic', 'id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
<commit_msg>Remove the topic and time from the pandas index so they are included in the json output again.<commit_after>"""Serializers for the use with rest-pandas"""
from rest_framework import serializers
from .models import MQTTMessage
import re
import copy
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = MQTTMessage
fields = ['id', 'time_recorded', 'topic', 'payload']
pandas_index = ['id']
def generate_parsing_serializer_class(regex):
"""Generate a serializer class from a regular expression."""
regex = re.compile(regex)
groups = regex.groupindex.keys()
# Copy vanilla MessageSerializer class
class_name = 'DynamicParsingMessageSerializer'
parent_classes = (MessageSerializer,)
class_dict = {}
meta_dict = copy.deepcopy(MessageSerializer.Meta.__dict__)
class_dict['Meta'] = type('Meta', (object,), meta_dict)
# Add additional parsed fields
for group in groups:
name, typ = MQTTMessage._parse_group_name(group)
# Add custom field to the serializer
class_dict['parsed_'+name] = serializers.SerializerMethodField()
class_dict['Meta'].fields.append('parsed_'+name)
# Add a method to actually get the value
def _f(self, obj):
parsed = obj.parse_payload(regex)
if parsed is None or name not in parsed:
return None
else:
return parsed[name]
class_dict['get_parsed_'+name] = _f
return type(class_name, parent_classes, class_dict)
|
ee35232228b8959bb790b971bf1661b1b3ea41fe | tests/manage.py | tests/manage.py | #!/usr/bin/env python
import channels.log
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
def get_channels_logger(*args, **kwargs):
"""Return logger for channels."""
return logging.getLogger("django.channels")
# Force channels to respect logging configurations from settings:
# https://github.com/django/channels/issues/520
channels.log.setup_logger = get_channels_logger
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Fix logging compatibility with the latest Channels | Fix logging compatibility with the latest Channels
| Python | apache-2.0 | genialis/resolwe,jberci/resolwe,jberci/resolwe,genialis/resolwe | #!/usr/bin/env python
import channels.log
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
def get_channels_logger(*args, **kwargs):
"""Return logger for channels."""
return logging.getLogger("django.channels")
# Force channels to respect logging configurations from settings:
# https://github.com/django/channels/issues/520
channels.log.setup_logger = get_channels_logger
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Fix logging compatibility with the latest Channels | #!/usr/bin/env python
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
import channels.log
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
def get_channels_logger(*args, **kwargs):
"""Return logger for channels."""
return logging.getLogger("django.channels")
# Force channels to respect logging configurations from settings:
# https://github.com/django/channels/issues/520
channels.log.setup_logger = get_channels_logger
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Fix logging compatibility with the latest Channels<commit_after> | #!/usr/bin/env python
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import channels.log
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
def get_channels_logger(*args, **kwargs):
"""Return logger for channels."""
return logging.getLogger("django.channels")
# Force channels to respect logging configurations from settings:
# https://github.com/django/channels/issues/520
channels.log.setup_logger = get_channels_logger
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Fix logging compatibility with the latest Channels#!/usr/bin/env python
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
import channels.log
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
def get_channels_logger(*args, **kwargs):
"""Return logger for channels."""
return logging.getLogger("django.channels")
# Force channels to respect logging configurations from settings:
# https://github.com/django/channels/issues/520
channels.log.setup_logger = get_channels_logger
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Fix logging compatibility with the latest Channels<commit_after>#!/usr/bin/env python
import logging
import os
import sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
# if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
# raise ValueError('This Django project is not intended for running a server.')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
9bd88d618223b47a520bfd3e2f70ae1cfcc3b02e | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/urls.py | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/urls.py | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Add views for testing 404 and 500 templates
urlpatterns += [
url(r'^test404/$', TemplateView.as_view(template_name='404.html')),
url(r'^test500/$', TemplateView.as_view(template_name='500.html')),
]
urlpatterns += [
url(r'', include(wagtail_urls)),
]
| Add views for testing 404 and 500 templates | Add views for testing 404 and 500 templates
| Python | bsd-3-clause | torchbox/cookiecutter-wagtail,torchbox/wagtail-cookiecutter,RocketPod/wagtail-cookiecutter,torchbox/cookiecutter-wagtail,torchbox/cookiecutter-wagtail,RocketPod/wagtail-cookiecutter,RocketPod/wagtail-cookiecutter,torchbox/cookiecutter-wagtail,torchbox/wagtail-cookiecutter,torchbox/wagtail-cookiecutter,torchbox/wagtail-cookiecutter | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Add views for testing 404 and 500 templates | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Add views for testing 404 and 500 templates
urlpatterns += [
url(r'^test404/$', TemplateView.as_view(template_name='404.html')),
url(r'^test500/$', TemplateView.as_view(template_name='500.html')),
]
urlpatterns += [
url(r'', include(wagtail_urls)),
]
| <commit_before>from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Add views for testing 404 and 500 templates<commit_after> | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Add views for testing 404 and 500 templates
urlpatterns += [
url(r'^test404/$', TemplateView.as_view(template_name='404.html')),
url(r'^test500/$', TemplateView.as_view(template_name='500.html')),
]
urlpatterns += [
url(r'', include(wagtail_urls)),
]
| from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Add views for testing 404 and 500 templatesfrom django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Add views for testing 404 and 500 templates
urlpatterns += [
url(r'^test404/$', TemplateView.as_view(template_name='404.html')),
url(r'^test500/$', TemplateView.as_view(template_name='500.html')),
]
urlpatterns += [
url(r'', include(wagtail_urls)),
]
| <commit_before>from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Add views for testing 404 and 500 templates<commit_after>from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
admin.autodiscover()
urlpatterns = [
url(r'^django-admin/', include(admin.site.urls)),
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^search/', include(wagtailsearch_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
]
if settings.DEBUG:
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Add views for testing 404 and 500 templates
urlpatterns += [
url(r'^test404/$', TemplateView.as_view(template_name='404.html')),
url(r'^test500/$', TemplateView.as_view(template_name='500.html')),
]
urlpatterns += [
url(r'', include(wagtail_urls)),
]
|
d197f74334d1733189f77dd3b12cb7db934ccd18 | lc0007_reverse_integer.py | lc0007_reverse_integer.py | """Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
| """Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
# If reversed integer is overflow.
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
| Add comments & space lines | Add comments & space lines
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | """Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
Add comments & space lines | """Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
# If reversed integer is overflow.
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
| <commit_before>"""Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
<commit_msg>Add comments & space lines<commit_after> | """Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
# If reversed integer is overflow.
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
| """Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
Add comments & space lines"""Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
# If reversed integer is overflow.
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
| <commit_before>"""Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
<commit_msg>Add comments & space lines<commit_after>"""Leetcode 7. Reverse Integer
Easy
URL: https://leetcode.com/problems/reverse-integer/description/
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Note:
The input is assumed to be a 32-bit signed integer.
Your function should return 0 when the reversed integer overflows.
"""
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
Time complexity: O(1).
Space complexity: O(1).
"""
# Since input x is a 32-bit integer, -2^31 <= x <= 2^31 - 1.
if x < 0:
x_rev = int(str(x)[::-1][-1] + str(x)[::-1][:-1])
else:
x_rev = int(str(x)[::-1])
# If reversed integer is overflow.
if abs(x_rev) > 0x7FFFFFFF:
x_rev = 0
return x_rev
def main():
print Solution().reverse(123)
print Solution().reverse(-123)
print Solution().reverse(-pow(2, 31))
if __name__ == '__main__':
main()
|
1838a160221859a40d208bc95352b105c53edb5f | partner_communication_switzerland/models/res_users.py | partner_communication_switzerland/models/res_users.py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
| # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
from odoo.addons.auth_signup.models.res_partner import now
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
expiration = now(days=+1)
self.mapped('partner_id').signup_prepare(
signup_type="reset", expiration=expiration)
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
| FIX password reset method that was not resetting the password | FIX password reset method that was not resetting the password
| Python | agpl-3.0 | CompassionCH/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
FIX password reset method that was not resetting the password | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
from odoo.addons.auth_signup.models.res_partner import now
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
expiration = now(days=+1)
self.mapped('partner_id').signup_prepare(
signup_type="reset", expiration=expiration)
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
| <commit_before># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
<commit_msg>FIX password reset method that was not resetting the password<commit_after> | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
from odoo.addons.auth_signup.models.res_partner import now
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
expiration = now(days=+1)
self.mapped('partner_id').signup_prepare(
signup_type="reset", expiration=expiration)
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
| # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
FIX password reset method that was not resetting the password# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
from odoo.addons.auth_signup.models.res_partner import now
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
expiration = now(days=+1)
self.mapped('partner_id').signup_prepare(
signup_type="reset", expiration=expiration)
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
| <commit_before># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
<commit_msg>FIX password reset method that was not resetting the password<commit_after># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models
from odoo.addons.auth_signup.models.res_partner import now
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
@api.multi
def action_reset_password(self):
create_mode = bool(self.env.context.get('create_user'))
# Only override the rest behavior, not normal signup
if create_mode:
super(ResUsers, self).action_reset_password()
else:
expiration = now(days=+1)
self.mapped('partner_id').signup_prepare(
signup_type="reset", expiration=expiration)
config = self.env.ref(
'partner_communication_switzerland.reset_password_email')
for user in self:
self.env['partner.communication.job'].create({
'partner_id': user.partner_id.id,
'config_id': config.id
})
@api.multi
def _compute_signature_letter(self):
""" Translate country in Signature (for Compassion Switzerland) """
for user in self:
employee = user.employee_ids
signature = ''
if len(employee) == 1:
signature += employee.name + '<br/>'
if employee.department_id:
signature += employee.department_id.name + '<br/>'
signature += user.company_id.name.split(' ')[0] + ' '
signature += user.company_id.country_id.name
user.signature_letter = signature
|
b071e9c5ac8ae479c8c5ab38c2e0a886c846b0e5 | pybossa/repositories/project_stats_repository.py | pybossa/repositories/project_stats_repository.py | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
desc=False, **filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, **filters)
| # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
fulltextsearch=None, desc=False, orderby='id',
**filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, fulltextsearch, desc, orderby,
**filters)
| Add desc and orderby to repo | Add desc and orderby to repo
| Python | agpl-3.0 | Scifabric/pybossa,PyBossa/pybossa,Scifabric/pybossa,PyBossa/pybossa | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
desc=False, **filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, **filters)
Add desc and orderby to repo | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
fulltextsearch=None, desc=False, orderby='id',
**filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, fulltextsearch, desc, orderby,
**filters)
| <commit_before># -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
desc=False, **filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, **filters)
<commit_msg>Add desc and orderby to repo<commit_after> | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
fulltextsearch=None, desc=False, orderby='id',
**filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, fulltextsearch, desc, orderby,
**filters)
| # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
desc=False, **filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, **filters)
Add desc and orderby to repo# -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
fulltextsearch=None, desc=False, orderby='id',
**filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, fulltextsearch, desc, orderby,
**filters)
| <commit_before># -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
desc=False, **filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, **filters)
<commit_msg>Add desc and orderby to repo<commit_after># -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import or_, func
from sqlalchemy.exc import IntegrityError
from pybossa.repositories import Repository
from pybossa.model.project_stats import ProjectStats
from pybossa.exc import WrongObjectError, DBIntegrityError
class ProjectStatsRepository(Repository):
def __init__(self, db):
self.db = db
def get(self, id):
return self.db.session.query(ProjectStats).get(id)
def filter_by(self, limit=None, offset=0, yielded=False, last_id=None,
fulltextsearch=None, desc=False, orderby='id',
**filters):
return self._filter_by(ProjectStats, limit, offset, yielded,
last_id, fulltextsearch, desc, orderby,
**filters)
|
0da4c663e8a48bb759a140ca304ce35d3a8b5dcf | pyconde/events/templatetags/event_tags.py | pyconde/events/templatetags/event_tags.py | import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
| from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
| Remove future-restriction on list_events tag | Remove future-restriction on list_events tag
| Python | bsd-3-clause | zerok/pyconde-website-mirror,EuroPython/djep,EuroPython/djep,EuroPython/djep,pysv/djep,pysv/djep,EuroPython/djep,zerok/pyconde-website-mirror,pysv/djep,pysv/djep,pysv/djep,zerok/pyconde-website-mirror | import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
Remove future-restriction on list_events tag | from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
| <commit_before>import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
<commit_msg>Remove future-restriction on list_events tag<commit_after> | from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
| import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
Remove future-restriction on list_events tagfrom django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
| <commit_before>import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
<commit_msg>Remove future-restriction on list_events tag<commit_after>from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
|
2114527f8de7b7e5175b43c54b4b84db2f169a01 | djangocms_forms/migrations/0004_redirect_delay.py | djangocms_forms/migrations/0004_redirect_delay.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting."),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting. 1000 milliseconds = 1 second."),
),
]
| Update migrations — `verbose_name` for `redirect_delay` fields | Update migrations — `verbose_name` for `redirect_delay` fields
| Python | bsd-3-clause | mishbahr/djangocms-forms,mishbahr/djangocms-forms,mishbahr/djangocms-forms | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting."),
),
]
Update migrations — `verbose_name` for `redirect_delay` fields | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting. 1000 milliseconds = 1 second."),
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting."),
),
]
<commit_msg>Update migrations — `verbose_name` for `redirect_delay` fields<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting. 1000 milliseconds = 1 second."),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting."),
),
]
Update migrations — `verbose_name` for `redirect_delay` fields# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting. 1000 milliseconds = 1 second."),
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting."),
),
]
<commit_msg>Update migrations — `verbose_name` for `redirect_delay` fields<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('djangocms_forms', '0003_add_referrer_field'),
]
operations = [
migrations.AddField(
model_name='formdefinition',
name='redirect_delay',
field=models.PositiveIntegerField(verbose_name='Redirect Delay', blank=True, null=True, help_text="Wait this number of milliseconds before redirecting. 1000 milliseconds = 1 second."),
),
]
|
8b84353d366daf1b1f0a19aff51d9d817428c6b9 | primestg/message.py | primestg/message.py | from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
return binascii.hexlify(content[:2].encode('utf-8')) == b'1f8b'
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
| from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
signature = content[:2]
try:
res = binascii.hexlify(signature) == b'1f8b'
except:
res = binascii.hexlify(signature.encode('utf-8')) == b'1f8b'
return res
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
| FIX works with py3 binary data and string data | FIX works with py3 binary data and string data
| Python | agpl-3.0 | gisce/primestg | from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
return binascii.hexlify(content[:2].encode('utf-8')) == b'1f8b'
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
FIX works with py3 binary data and string data | from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
signature = content[:2]
try:
res = binascii.hexlify(signature) == b'1f8b'
except:
res = binascii.hexlify(signature.encode('utf-8')) == b'1f8b'
return res
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
| <commit_before>from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
return binascii.hexlify(content[:2].encode('utf-8')) == b'1f8b'
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
<commit_msg>FIX works with py3 binary data and string data<commit_after> | from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
signature = content[:2]
try:
res = binascii.hexlify(signature) == b'1f8b'
except:
res = binascii.hexlify(signature.encode('utf-8')) == b'1f8b'
return res
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
| from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
return binascii.hexlify(content[:2].encode('utf-8')) == b'1f8b'
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
FIX works with py3 binary data and string datafrom lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
signature = content[:2]
try:
res = binascii.hexlify(signature) == b'1f8b'
except:
res = binascii.hexlify(signature.encode('utf-8')) == b'1f8b'
return res
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
| <commit_before>from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
return binascii.hexlify(content[:2].encode('utf-8')) == b'1f8b'
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
<commit_msg>FIX works with py3 binary data and string data<commit_after>from lxml.objectify import fromstring
import binascii
import zlib
def is_gziped(content):
signature = content[:2]
try:
res = binascii.hexlify(signature) == b'1f8b'
except:
res = binascii.hexlify(signature.encode('utf-8')) == b'1f8b'
return res
class BaseMessage(object):
"""
Base XML message.
"""
def __init__(self, xml):
"""
Create an object of BaseMessage.
:param xml: a file object or a string with the XML
:return: an instance of BaseMessage
"""
self.objectified = xml
@property
def objectified(self):
"""
The XML objectified
:return: the XML objectified
"""
return self._objectified
@objectified.setter
def objectified(self, value):
"""
Objectify an XML
:param value: a file object or string with the XML
"""
if hasattr(value, 'read'):
value = value.read()
if is_gziped(value):
value = zlib.decompress(value, zlib.MAX_WBITS | 32)
self._xml = value
self._objectified = fromstring(self._xml)
class MessageS(BaseMessage):
"""
Message class for reports.
"""
pass
|
b63edf8067c070f988f2ded4e33592d33bbbcab5 | frappe/patches/v11_0/set_primary_key_in_series.py | frappe/patches/v11_0/set_primary_key_in_series.py | import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 2)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
| import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 1)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
| Check duplicates > 1 instead of > 2 | Check duplicates > 1 instead of > 2 | Python | mit | adityahase/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,StrellaGroup/frappe,vjFaLk/frappe,vjFaLk/frappe,yashodhank/frappe,adityahase/frappe,yashodhank/frappe,adityahase/frappe,mhbu50/frappe,yashodhank/frappe,almeidapaulopt/frappe,frappe/frappe,saurabh6790/frappe,mhbu50/frappe,vjFaLk/frappe,vjFaLk/frappe,saurabh6790/frappe,saurabh6790/frappe,almeidapaulopt/frappe,yashodhank/frappe,frappe/frappe,saurabh6790/frappe,mhbu50/frappe,adityahase/frappe,frappe/frappe,StrellaGroup/frappe | import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 2)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
Check duplicates > 1 instead of > 2 | import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 1)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
| <commit_before>import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 2)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
<commit_msg>Check duplicates > 1 instead of > 2<commit_after> | import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 1)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
| import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 2)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
Check duplicates > 1 instead of > 2import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 1)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
| <commit_before>import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 2)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
<commit_msg>Check duplicates > 1 instead of > 2<commit_after>import frappe
def execute():
keys_encountered = set()
#if current = 0, simply delete the key as it'll be recreated on first entry
frappe.db.sql('delete from `tabSeries` where current = 0')
duplicate_keys = frappe.db.sql('''
SELECT distinct name, current
from
`tabSeries`
where
name in (Select name from `tabSeries` group by name having count(name) > 1)
''', as_dict=True)
for row in duplicate_keys:
if row.name in keys_encountered:
frappe.throw('''
Key {row.name} appears twice in `tabSeries` with different values.
Kindly remove the faulty one manually before continuing
'''.format(row=row))
frappe.db.sql('delete from `tabSeries` where name = %(key)s', {
'key': row.name
})
if row.current:
frappe.db.sql('insert into `tabSeries`(`name`, `current`) values (%(name)s, %(current)s)', row)
keys_encountered.add(row.name)
frappe.db.commit()
frappe.db.sql('ALTER table `tabSeries` ADD PRIMARY KEY IF NOT EXISTS (name)')
|
834a6a65f144e17f22851230d2baf3524f5e98c0 | flexget/plugins/est_released.py | flexget/plugins/est_released.py | import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
return estimator.instance.estimate(entry)
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
| import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
estimate = estimator.instance.estimate(entry)
# return first successful estimation
if estimate is not None:
return estimate
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
| Fix estimator loop, consider rest plugins as well. | Fix estimator loop, consider rest plugins as well.
| Python | mit | ianstalk/Flexget,qk4l/Flexget,malkavi/Flexget,crawln45/Flexget,tobinjt/Flexget,xfouloux/Flexget,asm0dey/Flexget,Flexget/Flexget,ibrahimkarahan/Flexget,vfrc2/Flexget,jacobmetrick/Flexget,crawln45/Flexget,tarzasai/Flexget,Pretagonist/Flexget,xfouloux/Flexget,spencerjanssen/Flexget,lildadou/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,tsnoam/Flexget,Danfocus/Flexget,ratoaq2/Flexget,tsnoam/Flexget,ibrahimkarahan/Flexget,offbyone/Flexget,OmgOhnoes/Flexget,camon/Flexget,X-dark/Flexget,Flexget/Flexget,asm0dey/Flexget,v17al/Flexget,ianstalk/Flexget,voriux/Flexget,ratoaq2/Flexget,X-dark/Flexget,tsnoam/Flexget,jawilson/Flexget,Flexget/Flexget,grrr2/Flexget,patsissons/Flexget,poulpito/Flexget,jacobmetrick/Flexget,camon/Flexget,malkavi/Flexget,lildadou/Flexget,poulpito/Flexget,qk4l/Flexget,tarzasai/Flexget,thalamus/Flexget,vfrc2/Flexget,Danfocus/Flexget,cvium/Flexget,tvcsantos/Flexget,ZefQ/Flexget,jacobmetrick/Flexget,dsemi/Flexget,tobinjt/Flexget,ibrahimkarahan/Flexget,qk4l/Flexget,OmgOhnoes/Flexget,drwyrm/Flexget,gazpachoking/Flexget,dsemi/Flexget,malkavi/Flexget,oxc/Flexget,malkavi/Flexget,grrr2/Flexget,vfrc2/Flexget,drwyrm/Flexget,drwyrm/Flexget,asm0dey/Flexget,poulpito/Flexget,xfouloux/Flexget,LynxyssCZ/Flexget,spencerjanssen/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,grrr2/Flexget,tobinjt/Flexget,antivirtel/Flexget,Danfocus/Flexget,patsissons/Flexget,ianstalk/Flexget,lildadou/Flexget,Flexget/Flexget,ZefQ/Flexget,LynxyssCZ/Flexget,dsemi/Flexget,tobinjt/Flexget,oxc/Flexget,oxc/Flexget,voriux/Flexget,thalamus/Flexget,qvazzler/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,OmgOhnoes/Flexget,cvium/Flexget,v17al/Flexget,ZefQ/Flexget,sean797/Flexget,offbyone/Flexget,sean797/Flexget,crawln45/Flexget,Pretagonist/Flexget,qvazzler/Flexget,offbyone/Flexget,patsissons/Flexget,tvcsantos/Flexget,Pretagonist/Flexget,ratoaq2/Flexget,LynxyssCZ/Flexget,v17al/Flexget,qvazzler/Flexget,gazpachoking/Flexget,crawln45/Flexget,tarzasai/Flexget,cvium/Flexget,LynxyssCZ/Flexget,
thalamus/Flexget,X-dark/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,jawilson/Flexget,Danfocus/Flexget,sean797/Flexget,spencerjanssen/Flexget | import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
return estimator.instance.estimate(entry)
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
Fix estimator loop, consider rest plugins as well. | import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
estimate = estimator.instance.estimate(entry)
# return first successful estimation
if estimate is not None:
return estimate
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
| <commit_before>import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
return estimator.instance.estimate(entry)
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
<commit_msg>Fix estimator loop, consider rest plugins as well.<commit_after> | import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
estimate = estimator.instance.estimate(entry)
# return first successful estimation
if estimate is not None:
return estimate
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
| import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
return estimator.instance.estimate(entry)
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
Fix estimator loop, consider rest plugins as well.import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
estimate = estimator.instance.estimate(entry)
# return first successful estimation
if estimate is not None:
return estimate
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
| <commit_before>import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
return estimator.instance.estimate(entry)
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
<commit_msg>Fix estimator loop, consider rest plugins as well.<commit_after>import logging
from flexget.plugin import get_plugins_by_group, register_plugin
log = logging.getLogger('est_released')
class EstimateRelease(object):
"""
Front-end for estimator plugins that estimate release times
for various things (series, movies).
"""
def estimate(self, entry):
"""
Estimate release schedule for Entry
:param entry:
:return: estimated date of released for the entry, None if it can't figure it out
"""
log.info(entry['title'])
estimators = get_plugins_by_group('estimate_release')
for estimator in estimators:
estimate = estimator.instance.estimate(entry)
# return first successful estimation
if estimate is not None:
return estimate
register_plugin(EstimateRelease, 'estimate_release', api_ver=2)
|
965dc806c5577fea89f1fcf78e3cdfcbff84b65f | moto/iam/exceptions.py | moto/iam/exceptions.py | from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"Not Found", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
| from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"NoSuchEntity", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
| Fix the error code for IAMNotFoundException to NoSuchEntity used by AWS. | Fix the error code for IAMNotFoundException to NoSuchEntity used by AWS.
| Python | apache-2.0 | spulec/moto,spulec/moto,botify-labs/moto,kefo/moto,kefo/moto,william-richard/moto,dbfr3qs/moto,Affirm/moto,ZuluPro/moto,kefo/moto,botify-labs/moto,botify-labs/moto,whummer/moto,Affirm/moto,botify-labs/moto,ZuluPro/moto,Brett55/moto,Brett55/moto,Affirm/moto,rocky4570/moto,william-richard/moto,rocky4570/moto,whummer/moto,okomestudio/moto,dbfr3qs/moto,Brett55/moto,Brett55/moto,Affirm/moto,Affirm/moto,whummer/moto,whummer/moto,botify-labs/moto,ZuluPro/moto,dbfr3qs/moto,spulec/moto,kefo/moto,dbfr3qs/moto,Affirm/moto,Brett55/moto,okomestudio/moto,spulec/moto,Brett55/moto,rocky4570/moto,william-richard/moto,whummer/moto,ZuluPro/moto,william-richard/moto,william-richard/moto,spulec/moto,whummer/moto,ZuluPro/moto,rocky4570/moto,rocky4570/moto,dbfr3qs/moto,dbfr3qs/moto,spulec/moto,okomestudio/moto,okomestudio/moto,kefo/moto,rocky4570/moto,ZuluPro/moto,botify-labs/moto,okomestudio/moto,okomestudio/moto,william-richard/moto | from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"Not Found", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
Fix the error code for IAMNotFoundException to NoSuchEntity used by AWS. | from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"NoSuchEntity", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
| <commit_before>from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"Not Found", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
<commit_msg>Fix the error code for IAMNotFoundException to NoSuchEntity used by AWS.<commit_after> | from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"NoSuchEntity", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
| from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"Not Found", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
Fix the error code for IAMNotFoundException to NoSuchEntity used by AWS.from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"NoSuchEntity", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
| <commit_before>from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"Not Found", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
<commit_msg>Fix the error code for IAMNotFoundException to NoSuchEntity used by AWS.<commit_after>from __future__ import unicode_literals
from moto.core.exceptions import RESTError
class IAMNotFoundException(RESTError):
code = 404
def __init__(self, message):
super(IAMNotFoundException, self).__init__(
"NoSuchEntity", message)
class IAMConflictException(RESTError):
code = 409
def __init__(self, code='Conflict', message=""):
super(IAMConflictException, self).__init__(
code, message)
class IAMReportNotPresentException(RESTError):
code = 410
def __init__(self, message):
super(IAMReportNotPresentException, self).__init__(
"ReportNotPresent", message)
|
ca563ca11fe04202ae38799ee992a48e0a01fd86 | material/admin/modules.py | material/admin/modules.py | from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff | from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff | Add module declaration for karenina | Add module declaration for karenina
| Python | bsd-3-clause | thiagoramos-luizalabs/django-material,refnode/django-material,lukasgarcya/django-material,viewflow/django-material,MonsterKiller/django-material,viewflow/django-material,barseghyanartur/django-material,MonsterKiller/django-material,un33k/django-material,afifnz/django-material,Axelio/django-material,viewflow/django-material,koopauy/django-material,barseghyanartur/django-material,pombredanne/django-material,pombredanne/django-material,koopauy/django-material,Axelio/django-material,koopauy/django-material,lukasgarcya/django-material,sourabhdattawad/django-material,2947721120/django-material,refnode/django-material,sourabhdattawad/django-material,pombredanne/django-material,barseghyanartur/django-material,2947721120/django-material,un33k/django-material,un33k/django-material,thiagoramos-luizalabs/django-material,lukasgarcya/django-material,MonsterKiller/django-material,2947721120/django-material,Axelio/django-material,refnode/django-material,afifnz/django-material,afifnz/django-material,sourabhdattawad/django-material,thiagoramos-luizalabs/django-material | from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staffAdd module declaration for karenina | from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff | <commit_before>from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff<commit_msg>Add module declaration for karenina<commit_after> | from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff | from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staffAdd module declaration for kareninafrom karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff | <commit_before>from karenina import modules
class Admin(modules.InstallableModule):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff<commit_msg>Add module declaration for karenina<commit_after>from karenina import modules
class Admin(modules.Module):
icon = "mdi-action-settings-applications"
order = 1000
@property
def label(self):
return 'Administration'
def has_perm(self, user):
return user.is_staff |
4cf56e47f27053bcfe01059427fceceb55d7da91 | labs/01_keras/solutions/keras_sgd_and_momentum.py | labs/01_keras/solutions/keras_sgd_and_momentum.py | model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
model.add(Activation("softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs. | model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.
| Remove leftover line in solution for 01_keras | Remove leftover line in solution for 01_keras
| Python | mit | m2dsupsdlclass/lectures-labs,m2dsupsdlclass/lectures-labs | model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
model.add(Activation("softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.Remove leftover line in solution for 01_keras | model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.
| <commit_before>model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
model.add(Activation("softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.<commit_msg>Remove leftover line in solution for 01_keras<commit_after> | model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.
| model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
model.add(Activation("softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.Remove leftover line in solution for 01_kerasmodel = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.
| <commit_before>model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
model.add(Activation("softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.<commit_msg>Remove leftover line in solution for 01_keras<commit_after>model = Sequential()
model.add(Dense(hidden_dim, input_dim=input_dim,
activation="tanh"))
model.add(Dense(output_dim, activation="softmax"))
optimizer = optimizers.SGD(lr=0.1, momentum=0.9, nesterov=True)
model.compile(optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train, Y_train, validation_split=0.2,
epochs=15, batch_size=32)
fig, (ax0, ax1) = plt.subplots(nrows=2, sharex=True, figsize=(12, 6))
history_df = pd.DataFrame(history.history)
history_df["epoch"] = history.epoch
history_df.plot(x="epoch", y=["loss", "val_loss"], ax=ax0)
history_df.plot(x="epoch", y=["accuracy", "val_accuracy"], ax=ax1);
# Analysis:
#
# Setting the learning rate value to a small value (e.g. lr=0.001 on
# this dataset) makes the model train much slower (it has not
# converged yet after 15 epochs).
#
# Using momentum tends to mitigate the small learning rate / slow
# training problem a bit.
#
# Setting the learning rate to a very large value (e.g. lr=10)
# makes the model randomly bounce around a good local
# minimum and therefore prevent it to reach a low training loss even
# after 30 epochs.
|
5dcba5d842209791918ea67ea95895a23475a803 | apps/smeuhoverride/models.py | apps/smeuhoverride/models.py | from django.db import models
# Create your models here.
| from django.contrib.contenttypes.models import ContentType
from django.db.models import signals
from tagging.models import TaggedItem
def taggeditem_delete(sender, **kwargs):
"""
Deletes TaggedItems for ALL deleted model instances
Workaround for bug:
http://code.google.com/p/django-tagging/issues/detail?id=162
"""
deleted = kwargs['instance']
try:
id = int(deleted.pk)
except ValueError:
return
ctype = ContentType.objects.get_for_model(deleted)
item_tags = TaggedItem.objects.filter(
content_type=ctype,
object_id=id,
)
item_tags.delete()
signals.post_delete.connect(taggeditem_delete)
| Delete tags for deleted items | Delete tags for deleted items
| Python | mit | amarandon/smeuhsocial,amarandon/smeuhsocial,fgirault/smeuhsocial,fgirault/smeuhsocial,amarandon/smeuhsocial,fgirault/smeuhsocial | from django.db import models
# Create your models here.
Delete tags for deleted items | from django.contrib.contenttypes.models import ContentType
from django.db.models import signals
from tagging.models import TaggedItem
def taggeditem_delete(sender, **kwargs):
"""
Deletes TaggedItems for ALL deleted model instances
Workaround for bug:
http://code.google.com/p/django-tagging/issues/detail?id=162
"""
deleted = kwargs['instance']
try:
id = int(deleted.pk)
except ValueError:
return
ctype = ContentType.objects.get_for_model(deleted)
item_tags = TaggedItem.objects.filter(
content_type=ctype,
object_id=id,
)
item_tags.delete()
signals.post_delete.connect(taggeditem_delete)
| <commit_before>from django.db import models
# Create your models here.
<commit_msg>Delete tags for deleted items<commit_after> | from django.contrib.contenttypes.models import ContentType
from django.db.models import signals
from tagging.models import TaggedItem
def taggeditem_delete(sender, **kwargs):
"""
Deletes TaggedItems for ALL deleted model instances
Workaround for bug:
http://code.google.com/p/django-tagging/issues/detail?id=162
"""
deleted = kwargs['instance']
try:
id = int(deleted.pk)
except ValueError:
return
ctype = ContentType.objects.get_for_model(deleted)
item_tags = TaggedItem.objects.filter(
content_type=ctype,
object_id=id,
)
item_tags.delete()
signals.post_delete.connect(taggeditem_delete)
| from django.db import models
# Create your models here.
Delete tags for deleted itemsfrom django.contrib.contenttypes.models import ContentType
from django.db.models import signals
from tagging.models import TaggedItem
def taggeditem_delete(sender, **kwargs):
"""
Deletes TaggedItems for ALL deleted model instances
Workaround for bug:
http://code.google.com/p/django-tagging/issues/detail?id=162
"""
deleted = kwargs['instance']
try:
id = int(deleted.pk)
except ValueError:
return
ctype = ContentType.objects.get_for_model(deleted)
item_tags = TaggedItem.objects.filter(
content_type=ctype,
object_id=id,
)
item_tags.delete()
signals.post_delete.connect(taggeditem_delete)
| <commit_before>from django.db import models
# Create your models here.
<commit_msg>Delete tags for deleted items<commit_after>from django.contrib.contenttypes.models import ContentType
from django.db.models import signals
from tagging.models import TaggedItem
def taggeditem_delete(sender, **kwargs):
"""
Deletes TaggedItems for ALL deleted model instances
Workaround for bug:
http://code.google.com/p/django-tagging/issues/detail?id=162
"""
deleted = kwargs['instance']
try:
id = int(deleted.pk)
except ValueError:
return
ctype = ContentType.objects.get_for_model(deleted)
item_tags = TaggedItem.objects.filter(
content_type=ctype,
object_id=id,
)
item_tags.delete()
signals.post_delete.connect(taggeditem_delete)
|
d4e721e3179c1f3fbce283b96b937fa4864786c3 | src/amber/hokuyo/hokuyo.py | src/amber/hokuyo/hokuyo.py | import serial
import sys
import os
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller() | import logging.config
import sys
import os
import time
import serial
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
LOGGER_NAME = 'AmberPipes'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
logging.config.fileConfig('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
logger = logging.getLogger(LOGGER_NAME)
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
while True:
# noinspection PyBroadException
try:
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()
except BaseException as e:
logger.error('error: %s' % str(e))
time.sleep(5) | Add restart mechanism for Hokuyo, update logging mechanism | Add restart mechanism for Hokuyo, update logging mechanism
| Python | mit | project-capo/amber-python-drivers,project-capo/amber-python-drivers | import serial
import sys
import os
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()Add restart mechanism for Hokuyo, update logging mechanism | import logging.config
import sys
import os
import time
import serial
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
LOGGER_NAME = 'AmberPipes'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
logging.config.fileConfig('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
logger = logging.getLogger(LOGGER_NAME)
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
while True:
# noinspection PyBroadException
try:
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()
except BaseException as e:
logger.error('error: %s' % str(e))
time.sleep(5) | <commit_before>import serial
import sys
import os
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()<commit_msg>Add restart mechanism for Hokuyo, update logging mechanism<commit_after> | import logging.config
import sys
import os
import time
import serial
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
LOGGER_NAME = 'AmberPipes'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
logging.config.fileConfig('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
logger = logging.getLogger(LOGGER_NAME)
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
while True:
# noinspection PyBroadException
try:
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()
except BaseException as e:
logger.error('error: %s' % str(e))
time.sleep(5) | import serial
import sys
import os
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()Add restart mechanism for Hokuyo, update logging mechanismimport logging.config
import sys
import os
import time
import serial
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
LOGGER_NAME = 'AmberPipes'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
logging.config.fileConfig('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
logger = logging.getLogger(LOGGER_NAME)
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
while True:
# noinspection PyBroadException
try:
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()
except BaseException as e:
logger.error('error: %s' % str(e))
time.sleep(5) | <commit_before>import serial
import sys
import os
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()<commit_msg>Add restart mechanism for Hokuyo, update logging mechanism<commit_after>import logging.config
import sys
import os
import time
import serial
from amber.hokuyo.hokuyo_common import HokuyoController
from amber.tools import serial_port, config
__author__ = 'paoolo'
LOGGER_NAME = 'AmberPipes'
pwd = os.path.dirname(os.path.abspath(__file__))
config.add_config_ini('%s/hokuyo.ini' % pwd)
logging.config.fileConfig('%s/hokuyo.ini' % pwd)
SERIAL_PORT = config.HOKUYO_SERIAL_PORT
BAUD_RATE = config.HOKUYO_BAUD_RATE
TIMEOUT = 0.1
if __name__ == '__main__':
logger = logging.getLogger(LOGGER_NAME)
serial = serial.Serial(port=SERIAL_PORT, baudrate=BAUD_RATE, timeout=TIMEOUT)
port = serial_port.SerialPort(serial)
while True:
# noinspection PyBroadException
try:
controller = HokuyoController(sys.stdin, sys.stdout, port)
controller()
except BaseException as e:
logger.error('error: %s' % str(e))
time.sleep(5) |
bd39a28e25dc8a3c79ef9b1b9ba7e6924a3f682b | test/test_basic.py | test/test_basic.py | #!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
self.assertIn(string, open('/tmp/q', 'r').read())
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
| #!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
| Work on older Python without assertIn method. | Work on older Python without assertIn method.
| Python | apache-2.0 | zestyping/q | #!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
self.assertIn(string, open('/tmp/q', 'r').read())
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
Work on older Python without assertIn method. | #!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
| <commit_before>#!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
self.assertIn(string, open('/tmp/q', 'r').read())
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
<commit_msg>Work on older Python without assertIn method.<commit_after> | #!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
| #!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
self.assertIn(string, open('/tmp/q', 'r').read())
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
Work on older Python without assertIn method.#!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
| <commit_before>#!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
self.assertIn(string, open('/tmp/q', 'r').read())
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
<commit_msg>Work on older Python without assertIn method.<commit_after>#!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
|
541592ba6d2a914b2b98ee11b8e31c7589b2b6d7 | mezzanine/project_template/manage.py | mezzanine/project_template/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Remove whitespace from blank line | Remove whitespace from blank line | Python | bsd-2-clause | emile2016/mezzanine,molokov/mezzanine,ZeroXn/mezzanine,ryneeverett/mezzanine,promil23/mezzanine,dustinrb/mezzanine,viaregio/mezzanine,adrian-the-git/mezzanine,dsanders11/mezzanine,spookylukey/mezzanine,wyzex/mezzanine,saintbird/mezzanine,readevalprint/mezzanine,frankchin/mezzanine,jjz/mezzanine,mush42/mezzanine,theclanks/mezzanine,frankchin/mezzanine,gradel/mezzanine,eino-makitalo/mezzanine,nikolas/mezzanine,SoLoHiC/mezzanine,sjdines/mezzanine,christianwgd/mezzanine,jerivas/mezzanine,promil23/mezzanine,molokov/mezzanine,tuxinhang1989/mezzanine,spookylukey/mezzanine,douglaskastle/mezzanine,sjdines/mezzanine,emile2016/mezzanine,Cicero-Zhao/mezzanine,webounty/mezzanine,douglaskastle/mezzanine,sjuxax/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,sjuxax/mezzanine,mush42/mezzanine,webounty/mezzanine,wyzex/mezzanine,biomassives/mezzanine,Skytorn86/mezzanine,Cicero-Zhao/mezzanine,geodesign/mezzanine,christianwgd/mezzanine,vladir/mezzanine,frankier/mezzanine,dsanders11/mezzanine,Skytorn86/mezzanine,webounty/mezzanine,jerivas/mezzanine,dovydas/mezzanine,adrian-the-git/mezzanine,molokov/mezzanine,eino-makitalo/mezzanine,dustinrb/mezzanine,joshcartme/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,ryneeverett/mezzanine,joshcartme/mezzanine,gradel/mezzanine,PegasusWang/mezzanine,viaregio/mezzanine,promil23/mezzanine,eino-makitalo/mezzanine,Cajoline/mezzanine,viaregio/mezzanine,vladir/mezzanine,saintbird/mezzanine,frankchin/mezzanine,christianwgd/mezzanine,theclanks/mezzanine,saintbird/mezzanine,spookylukey/mezzanine,jjz/mezzanine,joshcartme/mezzanine,SoLoHiC/mezzanine,nikolas/mezzanine,Cajoline/mezzanine,sjdines/mezzanine,wyzex/mezzanine,dsanders11/mezzanine,gradel/mezzanine,dustinrb/mezzanine,dovydas/mezzanine,tuxinhang1989/mezzanine,tuxinhang1989/mezzanine,industrydive/mezzanine,sjuxax/mezzanine,readevalprint/mezzanine,industrydive/mezzanine,biomassives/mezzanine,jerivas/m
ezzanine,mush42/mezzanine,emile2016/mezzanine,PegasusWang/mezzanine,SoLoHiC/mezzanine,readevalprint/mezzanine,nikolas/mezzanine,biomassives/mezzanine,douglaskastle/mezzanine,dovydas/mezzanine,geodesign/mezzanine,adrian-the-git/mezzanine,ZeroXn/mezzanine,Skytorn86/mezzanine,geodesign/mezzanine,PegasusWang/mezzanine,frankier/mezzanine,vladir/mezzanine,jjz/mezzanine,theclanks/mezzanine,industrydive/mezzanine,stephenmcd/mezzanine,Cajoline/mezzanine,ryneeverett/mezzanine,ZeroXn/mezzanine,frankier/mezzanine,wbtuomela/mezzanine | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Remove whitespace from blank line | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Remove whitespace from blank line<commit_after> | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Remove whitespace from blank line#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Remove whitespace from blank line<commit_after>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
from mezzanine.utils.conf import real_project_name
settings_module = "%s.settings" % real_project_name("{{ project_name }}")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
f5984fbd4187f4af65fb39b070f91870203d869b | openedx/stanford/djangoapps/register_cme/admin.py | openedx/stanford/djangoapps/register_cme/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
admin.site.register(ExtraInfo)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
""" Admin interface for ExtraInfo model. """
list_display = ('user', 'get_email', 'last_name', 'first_name',)
search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
| Change `ExtraInfo` to user fields, add search | Change `ExtraInfo` to user fields, add search
`Register_cme/extrainfo` in Django Admin was previously displaying users
as `ExtraInfo` objects which admins had to click on individually to see
each user's information. Each user is now displayed with fields:
username, email, last and first name. Username is clickable to view more
information. Added search bar enables search for users matching query
for username, email, last and first name.
| Python | agpl-3.0 | Stanford-Online/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
admin.site.register(ExtraInfo)
Change `ExtraInfo` to user fields, add search
`Register_cme/extrainfo` in Django Admin was previously displaying users
as `ExtraInfo` objects which admins had to click on individually to see
each user's information. Each user is now displayed with fields:
username, email, last and first name. Username is clickable to view more
information. Added search bar enables search for users matching query
for username, email, last and first name. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
""" Admin interface for ExtraInfo model. """
list_display = ('user', 'get_email', 'last_name', 'first_name',)
search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
admin.site.register(ExtraInfo)
<commit_msg>Change `ExtraInfo` to user fields, add search
`Register_cme/extrainfo` in Django Admin was previously displaying users
as `ExtraInfo` objects which admins had to click on individually to see
each user's information. Each user is now displayed with fields:
username, email, last and first name. Username is clickable to view more
information. Added search bar enables search for users matching query
for username, email, last and first name.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
""" Admin interface for ExtraInfo model. """
list_display = ('user', 'get_email', 'last_name', 'first_name',)
search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
admin.site.register(ExtraInfo)
Change `ExtraInfo` to user fields, add search
`Register_cme/extrainfo` in Django Admin was previously displaying users
as `ExtraInfo` objects which admins had to click on individually to see
each user's information. Each user is now displayed with fields:
username, email, last and first name. Username is clickable to view more
information. Added search bar enables search for users matching query
for username, email, last and first name.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
""" Admin interface for ExtraInfo model. """
list_display = ('user', 'get_email', 'last_name', 'first_name',)
search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
admin.site.register(ExtraInfo)
<commit_msg>Change `ExtraInfo` to user fields, add search
`Register_cme/extrainfo` in Django Admin was previously displaying users
as `ExtraInfo` objects which admins had to click on individually to see
each user's information. Each user is now displayed with fields:
username, email, last and first name. Username is clickable to view more
information. Added search bar enables search for users matching query
for username, email, last and first name.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
""" Admin interface for ExtraInfo model. """
list_display = ('user', 'get_email', 'last_name', 'first_name',)
search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
|
19dc04eb48a9484540298aa9a15fca016486921b | shop/models/fields.py | shop/models/fields.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
if POSTGRES_FLAG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
| Add control over psycopg2's version (need to be bigger or equal than 2.5.4) | Add control over psycopg2's version (need to be bigger or equal than 2.5.4)
| Python | bsd-3-clause | nimbis/django-shop,nimbis/django-shop,jrief/django-shop,divio/django-shop,khchine5/django-shop,awesto/django-shop,divio/django-shop,khchine5/django-shop,divio/django-shop,awesto/django-shop,awesto/django-shop,jrief/django-shop,jrief/django-shop,nimbis/django-shop,khchine5/django-shop,nimbis/django-shop,jrief/django-shop,khchine5/django-shop | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
if POSTGRES_FLAG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
Add control over psycopg2's version (need to be bigger or equal than 2.5.4) | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
if POSTGRES_FLAG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
<commit_msg>Add control over psycopg2's version (need to be bigger or equal than 2.5.4)<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
if POSTGRES_FLAG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
Add control over psycopg2's version (need to be bigger or equal than 2.5.4)# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
if POSTGRES_FLAG:
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
<commit_msg>Add control over psycopg2's version (need to be bigger or equal than 2.5.4)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils.version import LooseVersion
import re
from django.db import connection
POSTGRES_FLAG = False
if str(connection.vendor) == 'postgresql':
POSTGRES_FLAG = True
try:
import psycopg2
version = re.search('([0-9.]+)', psycopg2.__version__ or "0").group(0)
if POSTGRES_FLAG and (LooseVersion(version) >= LooseVersion('2.5.4')):
from django.contrib.postgres.fields import JSONField
else:
raise ImportError
except ImportError:
from jsonfield.fields import JSONField
class JSONFieldWrapper(JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': {}})
super(JSONFieldWrapper, self).__init__(*args, **kwargs)
|
b9c30e894c313400b7f3bfc007c39fa6e1d5ee76 | run_local_server.py | run_local_server.py | """Launch."""
from dnstwister import app
app.run(debug=True)
| """Launch."""
from dnstwister import app
# At least until https://github.com/pallets/flask/pull/1910 is merged...
app.jinja_env.auto_reload = True
app.run(debug=True)
| Fix for template reloading issues in newer version of flask | Fix for template reloading issues in newer version of flask
| Python | unlicense | thisismyrobot/dnstwister,thisismyrobot/dnstwister,thisismyrobot/dnstwister | """Launch."""
from dnstwister import app
app.run(debug=True)
Fix for template reloading issues in newer version of flask | """Launch."""
from dnstwister import app
# At least until https://github.com/pallets/flask/pull/1910 is merged...
app.jinja_env.auto_reload = True
app.run(debug=True)
| <commit_before>"""Launch."""
from dnstwister import app
app.run(debug=True)
<commit_msg>Fix for template reloading issues in newer version of flask<commit_after> | """Launch."""
from dnstwister import app
# At least until https://github.com/pallets/flask/pull/1910 is merged...
app.jinja_env.auto_reload = True
app.run(debug=True)
| """Launch."""
from dnstwister import app
app.run(debug=True)
Fix for template reloading issues in newer version of flask"""Launch."""
from dnstwister import app
# At least until https://github.com/pallets/flask/pull/1910 is merged...
app.jinja_env.auto_reload = True
app.run(debug=True)
| <commit_before>"""Launch."""
from dnstwister import app
app.run(debug=True)
<commit_msg>Fix for template reloading issues in newer version of flask<commit_after>"""Launch."""
from dnstwister import app
# At least until https://github.com/pallets/flask/pull/1910 is merged...
app.jinja_env.auto_reload = True
app.run(debug=True)
|
e500888a0fab56f5da3919a025a8f5ecf5bf1be3 | django_todo/apps/core/tests.py | django_todo/apps/core/tests.py | from django.test import TestCase
# Create your tests here.
| from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.test import TestCase
from django_todo.apps.core.models import Task
class TaskTestCase(TestCase):
def setUp(self):
self.user, created = User.objects.get_or_create(username='test_user', email='test@email.com', password='secret')
Task.objects.create(
description='Beautiful is better than ugly',
is_checked=False,
user=self.user)
Task.objects.create(
description='Simple is better than complex',
is_checked=True,
user=self.user,
date_done=datetime.now())
Task.objects.create(
description='Explicit is better than implicit',
is_checked=False,
user=self.user)
def test_pending_tasks_are_not_retrieved(self):
"""Pending tasks filter must not recover completed tasks."""
tasks = Task.objects.pending_tasks(self.user)
self.assertGreater(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
def test_pending_tasks_are_retrieved_in_order(self):
"""Newest tasks must be on top."""
tasks = Task.objects.pending_tasks(self.user)
self.assertNotEqual(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
# The last created task is the newest
self.assertEqual(tasks[0].description, 'Explicit is better than implicit')
| Add unit testing. Initial version | Add unit testing. Initial version
| Python | mit | maxicecilia/django_todo | from django.test import TestCase
# Create your tests here.
Add unit testing. Initial version | from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.test import TestCase
from django_todo.apps.core.models import Task
class TaskTestCase(TestCase):
def setUp(self):
self.user, created = User.objects.get_or_create(username='test_user', email='test@email.com', password='secret')
Task.objects.create(
description='Beautiful is better than ugly',
is_checked=False,
user=self.user)
Task.objects.create(
description='Simple is better than complex',
is_checked=True,
user=self.user,
date_done=datetime.now())
Task.objects.create(
description='Explicit is better than implicit',
is_checked=False,
user=self.user)
def test_pending_tasks_are_not_retrieved(self):
"""Pending tasks filter must not recover completed tasks."""
tasks = Task.objects.pending_tasks(self.user)
self.assertGreater(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
def test_pending_tasks_are_retrieved_in_order(self):
"""Newest tasks must be on top."""
tasks = Task.objects.pending_tasks(self.user)
self.assertNotEqual(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
# The last created task is the newest
self.assertEqual(tasks[0].description, 'Explicit is better than implicit')
| <commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add unit testing. Initial version<commit_after> | from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.test import TestCase
from django_todo.apps.core.models import Task
class TaskTestCase(TestCase):
def setUp(self):
self.user, created = User.objects.get_or_create(username='test_user', email='test@email.com', password='secret')
Task.objects.create(
description='Beautiful is better than ugly',
is_checked=False,
user=self.user)
Task.objects.create(
description='Simple is better than complex',
is_checked=True,
user=self.user,
date_done=datetime.now())
Task.objects.create(
description='Explicit is better than implicit',
is_checked=False,
user=self.user)
def test_pending_tasks_are_not_retrieved(self):
"""Pending tasks filter must not recover completed tasks."""
tasks = Task.objects.pending_tasks(self.user)
self.assertGreater(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
def test_pending_tasks_are_retrieved_in_order(self):
"""Newest tasks must be on top."""
tasks = Task.objects.pending_tasks(self.user)
self.assertNotEqual(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
# The last created task is the newest
self.assertEqual(tasks[0].description, 'Explicit is better than implicit')
| from django.test import TestCase
# Create your tests here.
Add unit testing. Initial versionfrom datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.test import TestCase
from django_todo.apps.core.models import Task
class TaskTestCase(TestCase):
def setUp(self):
self.user, created = User.objects.get_or_create(username='test_user', email='test@email.com', password='secret')
Task.objects.create(
description='Beautiful is better than ugly',
is_checked=False,
user=self.user)
Task.objects.create(
description='Simple is better than complex',
is_checked=True,
user=self.user,
date_done=datetime.now())
Task.objects.create(
description='Explicit is better than implicit',
is_checked=False,
user=self.user)
def test_pending_tasks_are_not_retrieved(self):
"""Pending tasks filter must not recover completed tasks."""
tasks = Task.objects.pending_tasks(self.user)
self.assertGreater(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
def test_pending_tasks_are_retrieved_in_order(self):
"""Newest tasks must be on top."""
tasks = Task.objects.pending_tasks(self.user)
self.assertNotEqual(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
# The last created task is the newest
self.assertEqual(tasks[0].description, 'Explicit is better than implicit')
| <commit_before>from django.test import TestCase
# Create your tests here.
<commit_msg>Add unit testing. Initial version<commit_after>from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.test import TestCase
from django_todo.apps.core.models import Task
class TaskTestCase(TestCase):
def setUp(self):
self.user, created = User.objects.get_or_create(username='test_user', email='test@email.com', password='secret')
Task.objects.create(
description='Beautiful is better than ugly',
is_checked=False,
user=self.user)
Task.objects.create(
description='Simple is better than complex',
is_checked=True,
user=self.user,
date_done=datetime.now())
Task.objects.create(
description='Explicit is better than implicit',
is_checked=False,
user=self.user)
def test_pending_tasks_are_not_retrieved(self):
"""Pending tasks filter must not recover completed tasks."""
tasks = Task.objects.pending_tasks(self.user)
self.assertGreater(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
def test_pending_tasks_are_retrieved_in_order(self):
"""Newest tasks must be on top."""
tasks = Task.objects.pending_tasks(self.user)
self.assertNotEqual(len(tasks), 0)
self.assertEqual(tasks[0].is_checked, False)
# The last created task is the newest
self.assertEqual(tasks[0].description, 'Explicit is better than implicit')
|
f38081a1e8d136f5fab36090b177b7f12b3d25d9 | promgen/sender/__init__.py | promgen/sender/__init__.py | import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self._send.delay(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send.delay(target, alert, {'externalURL': ''})
| import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def __init__(self):
# In case some of our sender plugins are not using celery,
# We store our calling function in self.__send so that send()
# and test() can call the correct function while leaving the
# original function alone in case it needs to be called directly
if hasattr(self._send, 'delay'):
self.__send = self._send.delay
else:
self.__send = self._send
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self.__send(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self.__send(target, alert, {'externalURL': ''})
| Support both celery _send task and non-celery _send method | Support both celery _send task and non-celery _send method
| Python | mit | kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen | import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self._send.delay(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send.delay(target, alert, {'externalURL': ''})
Support both celery _send task and non-celery _send method | import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def __init__(self):
# In case some of our sender plugins are not using celery,
# We store our calling function in self.__send so that send()
# and test() can call the correct function while leaving the
# original function alone in case it needs to be called directly
if hasattr(self._send, 'delay'):
self.__send = self._send.delay
else:
self.__send = self._send
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self.__send(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self.__send(target, alert, {'externalURL': ''})
| <commit_before>import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self._send.delay(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send.delay(target, alert, {'externalURL': ''})
<commit_msg>Support both celery _send task and non-celery _send method<commit_after> | import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def __init__(self):
# In case some of our sender plugins are not using celery,
# We store our calling function in self.__send so that send()
# and test() can call the correct function while leaving the
# original function alone in case it needs to be called directly
if hasattr(self._send, 'delay'):
self.__send = self._send.delay
else:
self.__send = self._send
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self.__send(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self.__send(target, alert, {'externalURL': ''})
| import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self._send.delay(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send.delay(target, alert, {'externalURL': ''})
Support both celery _send task and non-celery _send methodimport logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def __init__(self):
# In case some of our sender plugins are not using celery,
# We store our calling function in self.__send so that send()
# and test() can call the correct function while leaving the
# original function alone in case it needs to be called directly
if hasattr(self._send, 'delay'):
self.__send = self._send.delay
else:
self.__send = self._send
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self.__send(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self.__send(target, alert, {'externalURL': ''})
| <commit_before>import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self._send.delay(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self._send.delay(target, alert, {'externalURL': ''})
<commit_msg>Support both celery _send task and non-celery _send method<commit_after>import logging
from promgen.models import Sender
logger = logging.getLogger(__name__)
class SenderBase(object):
def __init__(self):
# In case some of our sender plugins are not using celery,
# We store our calling function in self.__send so that send()
# and test() can call the correct function while leaving the
# original function alone in case it needs to be called directly
if hasattr(self._send, 'delay'):
self.__send = self._send.delay
else:
self.__send = self._send
def send(self, data):
sent = 0
for alert in data['alerts']:
project = alert['labels'].get('project')
for sender in Sender.objects.filter(sender=self.__module__, project__name=project):
if self.__send(sender.value, alert, data):
logger.debug('Sent %s for %s', self.__module__, project)
sent += 1
if sent == 0:
logger.debug('No senders configured for %s->%s', project, self.__module__)
return sent
def test(self, target, alert):
logger.debug('Sending test message to %s', target)
self.__send(target, alert, {'externalURL': ''})
|
1d26fddd3fb1581138117b2fbeeb21877bc48883 | sample_app/utils.py | sample_app/utils.py |
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in values[0].keys():
table_rows.append([key.title()])
table_rows[-1].extend(
ticket_type[key]
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
|
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in sorted(values[0].keys()):
table_rows.append([key.title()])
table_rows[-1].extend(
'${}'.format(ticket_type[key])
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
| Add dollar sign to fares, sort by fare type | Add dollar sign to fares, sort by fare type
| Python | mit | Mause/pytransperth,Mause/pytransperth |
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in values[0].keys():
table_rows.append([key.title()])
table_rows[-1].extend(
ticket_type[key]
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
Add dollar sign to fares, sort by fare type |
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in sorted(values[0].keys()):
table_rows.append([key.title()])
table_rows[-1].extend(
'${}'.format(ticket_type[key])
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
| <commit_before>
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in values[0].keys():
table_rows.append([key.title()])
table_rows[-1].extend(
ticket_type[key]
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
<commit_msg>Add dollar sign to fares, sort by fare type<commit_after> |
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in sorted(values[0].keys()):
table_rows.append([key.title()])
table_rows[-1].extend(
'${}'.format(ticket_type[key])
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
|
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in values[0].keys():
table_rows.append([key.title()])
table_rows[-1].extend(
ticket_type[key]
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
Add dollar sign to fares, sort by fare type
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in sorted(values[0].keys()):
table_rows.append([key.title()])
table_rows[-1].extend(
'${}'.format(ticket_type[key])
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
| <commit_before>
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in values[0].keys():
table_rows.append([key.title()])
table_rows[-1].extend(
ticket_type[key]
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
<commit_msg>Add dollar sign to fares, sort by fare type<commit_after>
import tornado.web
import ipy_table
from transperth.location import Location
class BaseRequestHandler(tornado.web.RequestHandler):
@property
def args(self):
args = self.request.arguments
return {
k: [sv.decode() for sv in v]
for k, v in args.items()
}
def get_location(self, key):
return Location.from_location(
self.get_argument(key)
)
def fares_to_table(fares):
keys, values = zip(*fares.items())
table_rows = [['Fare Type']]
table_rows[-1].extend(key.title() for key in keys)
for key in sorted(values[0].keys()):
table_rows.append([key.title()])
table_rows[-1].extend(
'${}'.format(ticket_type[key])
for ticket_type in values
)
table = ipy_table.make_table(table_rows)
table.apply_theme('basic')
return table
|
b82a7beffac7ccd497f88e7f72a70e9c3ae7146a | syntacticframes_project/loadmapping/migrations/0002_auto_20140916_1053.py | syntacticframes_project/loadmapping/migrations/0002_auto_20140916_1053.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
def delete_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs, delete_verbs)
]
| Allow to reverse LVFVerb migration | Allow to reverse LVFVerb migration
| Python | mit | aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs)
]
Allow to reverse LVFVerb migration | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
def delete_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs, delete_verbs)
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs)
]
<commit_msg>Allow to reverse LVFVerb migration<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
def delete_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs, delete_verbs)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs)
]
Allow to reverse LVFVerb migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
def delete_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs, delete_verbs)
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs)
]
<commit_msg>Allow to reverse LVFVerb migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from os.path import join
from django.db import models, migrations
from django.conf import settings
from loadmapping.models import LVFVerb
def import_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
with open(join(settings.SITE_ROOT, 'loadmapping/fixtures/lvfverb.json')) as fixture:
for entry in json.loads(fixture.read()):
assert entry['model'] == 'loadmapping.lvfverb'
fields = entry['fields']
LVFVerb(
lemma=fields['lemma'],
sense=fields['sense'],
lvf_class=fields['lvf_class'],
construction=fields['construction']).save()
def delete_verbs(apps, schema_editor):
LVFVerb = apps.get_model('loadmapping', 'LVFVerb')
LVFVerb.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('loadmapping', '0001_initial'),
]
operations = [
migrations.RunPython(import_verbs, delete_verbs)
]
|
2c70c70099cffe88439fa082fb0e7942d8cfed88 | tests/run_tests.py | tests/run_tests.py | #!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| #!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
import sys
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
RESULTS = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not RESULTS.wasSuccessful():
sys.exit(1)
| Exit non-zero if unit tests failed | Exit non-zero if unit tests failed
| Python | apache-2.0 | matyasselmeci/htcondor-ce,brianhlin/htcondor-ce,opensciencegrid/htcondor-ce,djw8605/htcondor-ce,brianhlin/htcondor-ce,brianhlin/htcondor-ce,matyasselmeci/htcondor-ce,opensciencegrid/htcondor-ce,bbockelm/htcondor-ce,opensciencegrid/htcondor-ce,matyasselmeci/htcondor-ce,djw8605/htcondor-ce,bbockelm/htcondor-ce,djw8605/htcondor-ce,bbockelm/htcondor-ce | #!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
unittest.TextTestRunner(verbosity=2).run(SUITE)
Exit non-zero if unit tests failed | #!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
import sys
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
RESULTS = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not RESULTS.wasSuccessful():
sys.exit(1)
| <commit_before>#!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
unittest.TextTestRunner(verbosity=2).run(SUITE)
<commit_msg>Exit non-zero if unit tests failed<commit_after> | #!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
import sys
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
RESULTS = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not RESULTS.wasSuccessful():
sys.exit(1)
| #!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
unittest.TextTestRunner(verbosity=2).run(SUITE)
Exit non-zero if unit tests failed#!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
import sys
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
RESULTS = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not RESULTS.wasSuccessful():
sys.exit(1)
| <commit_before>#!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
unittest.TextTestRunner(verbosity=2).run(SUITE)
<commit_msg>Exit non-zero if unit tests failed<commit_after>#!/bin/env python
"""Run HTCondor-CE unit tests"""
import glob
import unittest
import sys
TESTS = [test.strip('.py') for test in glob.glob('test*.py')]
SUITE = unittest.TestLoader().loadTestsFromNames(TESTS)
RESULTS = unittest.TextTestRunner(verbosity=2).run(SUITE)
if not RESULTS.wasSuccessful():
sys.exit(1)
|
bb27b536193fcc6ada7ab6a4193ac1bf889569d7 | indra/sources/hypothesis/api.py | indra/sources/hypothesis/api.py | import requests
| import requests
from indra.config import get_config
from .processor import HypothesisProcessor
base_url = 'https://api.hypothes.is/api/'
api_key = get_config('HYPOTHESIS_API_KEY')
headers = {'Authorization': 'Bearer %s' % api_key,
'Accept': 'application/vnd.hypothesis.v1+json',
'content-type': 'application/json'}
def send_request(endpoint, **params):
if api_key is None:
return ValueError('No API key set in HYPOTHESIS_API_KEY')
res = requests.get(base_url + endpoint, headers=headers,
params=params)
res.raise_for_status()
return res.json()
def process_annotations(group):
res = send_request('search', group=group)
annotations = res.get('rows', [])
hp = HypothesisProcessor(annotations)
hp.get_statements()
return hp
| Implement fetching annotations for a given group | Implement fetching annotations for a given group
| Python | bsd-2-clause | bgyori/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,bgyori/indra | import requests
Implement fetching annotations for a given group | import requests
from indra.config import get_config
from .processor import HypothesisProcessor
base_url = 'https://api.hypothes.is/api/'
api_key = get_config('HYPOTHESIS_API_KEY')
headers = {'Authorization': 'Bearer %s' % api_key,
'Accept': 'application/vnd.hypothesis.v1+json',
'content-type': 'application/json'}
def send_request(endpoint, **params):
if api_key is None:
return ValueError('No API key set in HYPOTHESIS_API_KEY')
res = requests.get(base_url + endpoint, headers=headers,
params=params)
res.raise_for_status()
return res.json()
def process_annotations(group):
res = send_request('search', group=group)
annotations = res.get('rows', [])
hp = HypothesisProcessor(annotations)
hp.get_statements()
return hp
| <commit_before>import requests
<commit_msg>Implement fetching annotations for a given group<commit_after> | import requests
from indra.config import get_config
from .processor import HypothesisProcessor
base_url = 'https://api.hypothes.is/api/'
api_key = get_config('HYPOTHESIS_API_KEY')
headers = {'Authorization': 'Bearer %s' % api_key,
'Accept': 'application/vnd.hypothesis.v1+json',
'content-type': 'application/json'}
def send_request(endpoint, **params):
if api_key is None:
return ValueError('No API key set in HYPOTHESIS_API_KEY')
res = requests.get(base_url + endpoint, headers=headers,
params=params)
res.raise_for_status()
return res.json()
def process_annotations(group):
res = send_request('search', group=group)
annotations = res.get('rows', [])
hp = HypothesisProcessor(annotations)
hp.get_statements()
return hp
| import requests
Implement fetching annotations for a given groupimport requests
from indra.config import get_config
from .processor import HypothesisProcessor
base_url = 'https://api.hypothes.is/api/'
api_key = get_config('HYPOTHESIS_API_KEY')
headers = {'Authorization': 'Bearer %s' % api_key,
'Accept': 'application/vnd.hypothesis.v1+json',
'content-type': 'application/json'}
def send_request(endpoint, **params):
if api_key is None:
return ValueError('No API key set in HYPOTHESIS_API_KEY')
res = requests.get(base_url + endpoint, headers=headers,
params=params)
res.raise_for_status()
return res.json()
def process_annotations(group):
res = send_request('search', group=group)
annotations = res.get('rows', [])
hp = HypothesisProcessor(annotations)
hp.get_statements()
return hp
| <commit_before>import requests
<commit_msg>Implement fetching annotations for a given group<commit_after>import requests
from indra.config import get_config
from .processor import HypothesisProcessor
base_url = 'https://api.hypothes.is/api/'
api_key = get_config('HYPOTHESIS_API_KEY')
headers = {'Authorization': 'Bearer %s' % api_key,
'Accept': 'application/vnd.hypothesis.v1+json',
'content-type': 'application/json'}
def send_request(endpoint, **params):
if api_key is None:
return ValueError('No API key set in HYPOTHESIS_API_KEY')
res = requests.get(base_url + endpoint, headers=headers,
params=params)
res.raise_for_status()
return res.json()
def process_annotations(group):
res = send_request('search', group=group)
annotations = res.get('rows', [])
hp = HypothesisProcessor(annotations)
hp.get_statements()
return hp
|
2f268a124a5aceabab6dcdb545cf14648c297e2c | lava_results_app/admin.py | lava_results_app/admin.py | # Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
| # Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import fields
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
| Fix deprecation warnings - RemovedInDjango19Warning | Fix deprecation warnings - RemovedInDjango19Warning
Extend fix in review #9160 for subsequent changes.
Change-Id: I25fbe759cfd28ac683ef94b58a8da098141e8d48
| Python | agpl-3.0 | Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server | # Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
Fix deprecation warnings - RemovedInDjango19Warning
Extend fix in review #9160 for subsequent changes.
Change-Id: I25fbe759cfd28ac683ef94b58a8da098141e8d48 | # Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import fields
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
| <commit_before># Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
<commit_msg>Fix deprecation warnings - RemovedInDjango19Warning
Extend fix in review #9160 for subsequent changes.
Change-Id: I25fbe759cfd28ac683ef94b58a8da098141e8d48<commit_after> | # Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import fields
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
| # Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
Fix deprecation warnings - RemovedInDjango19Warning
Extend fix in review #9160 for subsequent changes.
Change-Id: I25fbe759cfd28ac683ef94b58a8da098141e8d48# Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import fields
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
| <commit_before># Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
<commit_msg>Fix deprecation warnings - RemovedInDjango19Warning
Extend fix in review #9160 for subsequent changes.
Change-Id: I25fbe759cfd28ac683ef94b58a8da098141e8d48<commit_after># Copyright (C) 2015 Linaro Limited
#
# Author: Stevan Radakovic <stevan.radakovic@linaro.org>
#
# This file is part of Lava Server.
#
# Lava Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Lava Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Lava Dashboard. If not, see <http://www.gnu.org/licenses/>.
"""
Administration interface of the LAVA Results application.
"""
from django import forms
from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.contenttypes import fields
from django.utils.translation import ugettext_lazy as _
from lava_results_app.models import (
Query
)
class QueryAdmin(admin.ModelAdmin):
save_as = True
admin.site.register(Query, QueryAdmin)
|
3224ea27a23e1c254bb93a110be1bd481585cb99 | mosecom_air/api/models.py | mosecom_air/api/models.py | #coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
| #coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
| Add index for Measurement model | Add index for Measurement model
| Python | mit | elsid/mosecom-air,elsid/mosecom-air,elsid/mosecom-air | #coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
Add index for Measurement model | #coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
| <commit_before>#coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
<commit_msg>Add index for Measurement model<commit_after> | #coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
| #coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
Add index for Measurement model#coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
| <commit_before>#coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
<commit_msg>Add index for Measurement model<commit_after>#coding: utf-8
from django.db import models
class Substance(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Station(models.Model):
name = models.TextField(unique=True, db_index=True)
alias = models.TextField()
class Unit(models.Model):
name = models.TextField(unique=True, db_index=True)
class Measurement(models.Model):
station = models.ForeignKey(Station)
substance = models.ForeignKey(Substance)
unit = models.ForeignKey(Unit)
value = models.FloatField()
performed = models.DateTimeField()
written = models.DateTimeField(auto_now=True)
class Meta:
index_together = [
['station', 'substance', 'unit', 'performed']
]
|
8054982b3aa106a9551e792f6453993484a17f2a | tests/unit/test_factory.py | tests/unit/test_factory.py | # -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing | # -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
@pytest.mark.skip(reason="Scheduler is not functioning and needs to be replaced.")
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing | Mark test_default_config as skip; Scheduler needs to be rewritten | Mark test_default_config as skip; Scheduler needs to be rewritten
| Python | apache-2.0 | CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords | # -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testingMark test_default_config as skip; Scheduler needs to be rewritten | # -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
@pytest.mark.skip(reason="Scheduler is not functioning and needs to be replaced.")
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing | <commit_before># -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing<commit_msg>Mark test_default_config as skip; Scheduler needs to be rewritten<commit_after> | # -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
@pytest.mark.skip(reason="Scheduler is not functioning and needs to be replaced.")
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing | # -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testingMark test_default_config as skip; Scheduler needs to be rewritten# -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
@pytest.mark.skip(reason="Scheduler is not functioning and needs to be replaced.")
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing | <commit_before># -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing<commit_msg>Mark test_default_config as skip; Scheduler needs to be rewritten<commit_after># -*- coding: utf-8 -*-
"""Test Factory Module
This module contains the tests for the OpenRecords Application Factory
"""
import os
import flask
import json
import pytest
from app import create_app
@pytest.mark.skip(reason="Scheduler is not functioning and needs to be replaced.")
def test_default_config():
"""Test the default config class is the DevelopmentConfig"""
assert isinstance(create_app(), flask.app.Flask)
def test_testing_config():
"""Test the app.testing variable is set when using the testing config."""
assert create_app(config_name='testing', jobs_enabled=False).testing |
e7cba721d78860d0151cc65793e567b0da719d39 | regserver/regulations/tests/partial_view_tests.py | regserver/regulations/tests/partial_view_tests.py | from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
| from unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
| Change test, so that view has a request object | Change test, so that view has a request object
| Python | cc0-1.0 | ascott1/regulations-site,18F/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,adderall/regulations-site,adderall/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,EricSchles/regulations-site,EricSchles/regulations-site,willbarton/regulations-site,adderall/regulations-site,willbarton/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,tadhg-ohiggins/regulations-site,jeremiak/regulations-site,tadhg-ohiggins/regulations-site,EricSchles/regulations-site,jeremiak/regulations-site,18F/regulations-site,EricSchles/regulations-site,adderall/regulations-site,jeremiak/regulations-site,18F/regulations-site,eregs/regulations-site,ascott1/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,jeremiak/regulations-site,eregs/regulations-site,eregs/regulations-site | from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
Change test, so that view has a request object | from unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
| <commit_before>from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
<commit_msg>Change test, so that view has a request object<commit_after> | from unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
| from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
Change test, so that view has a request objectfrom unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
| <commit_before>from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
<commit_msg>Change test, so that view has a request object<commit_after>from unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
|
b8e9a2af61e1b8fe45e32966495e46357a145a56 | dom/automation/detect_assertions.py | dom/automation/detect_assertions.py | #!/usr/bin/env python
def amiss(logPrefix):
    """Scan the "<logPrefix>-err" log for non-ignorable assertion lines.

    Prints each new (not previously seen in this file) assertion line that
    is not covered by the ignore list, and returns True if any were found.
    """
    global ignoreList
    foundSomething = False
    currentFile = file(logPrefix + "-err", "r")
    # map from (assertion message) to (true, if seen in the current file)
    seenInCurrentFile = {}
    for line in currentFile:
        # Strip BEL characters and the trailing newline before matching.
        line = line.strip("\x07").rstrip("\n")
        if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
            seenInCurrentFile[line] = True
            if not (ignore(line)):
                print line
                foundSomething = True
    currentFile.close()
    return foundSomething
def getIgnores():
    """Populate ignoreList from known_assertions.txt.

    Blank lines and lines starting with "#" are skipped; every other
    stripped line becomes an ignorable substring.
    """
    global ignoreList
    ignoreFile = open("known_assertions.txt", "r")
    for line in ignoreFile:
        line = line.strip()
        if ((len(line) > 0) and not line.startswith("#")):
            ignoreList.append(line)
def ignore(assertion):
    """Return True when *assertion* contains any known-ignorable substring."""
    global ignoreList
    # A substring hit anywhere in the message counts as a match.
    return any(assertion.find(entry) != -1 for entry in ignoreList)
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
| #!/usr/bin/env python
import platform
def amiss(logPrefix):
    """Scan the "<logPrefix>-err" log for non-ignorable assertion lines.

    Prints each new (not previously seen in this file) assertion line that
    is not covered by the ignore lists, and returns True if any were found.
    """
    global ignoreList
    foundSomething = False
    currentFile = file(logPrefix + "-err", "r")
    # map from (assertion message) to (true, if seen in the current file)
    seenInCurrentFile = {}
    for line in currentFile:
        # Strip BEL characters and the trailing newline before matching.
        line = line.strip("\x07").rstrip("\n")
        if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
            seenInCurrentFile[line] = True
            if not (ignore(line)):
                print line
                foundSomething = True
    currentFile.close()
    return foundSomething
def getIgnores():
    """Load ignorable assertion strings from known_assertions.txt.

    Lines containing ", file " (assertion format) or ": file " (abort
    format) are split into a (message, filename) pair so the filename half
    can be normalized for the local platform; every other non-blank,
    non-comment line is kept as a simple substring.
    """
    global simpleIgnoreList
    # Declared for consistency with simpleIgnoreList; both module-level
    # lists are mutated here (append works without `global`, but declaring
    # it makes the dependency explicit and survives a future rebind).
    global twoPartIgnoreList
    ignoreFile = open("known_assertions.txt", "r")
    for line in ignoreFile:
        line = line.strip()
        if ((len(line) > 0) and not line.startswith("#")):
            mpi = line.find(", file ") # assertions use this format
            if (mpi == -1):
                mpi = line.find(": file ") # aborts use this format
            if (mpi == -1):
                simpleIgnoreList.append(line)
            else:
                # Keep the separator with the message half; normalize path
                # separators in the filename half for cross-platform matching.
                twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
    """Return *s* with backslashes turned into forward slashes on Windows.

    On every other platform the string is returned unchanged.
    """
    on_windows = platform.system() in ('Windows', 'Microsoft')
    if not on_windows:
        return s
    return s.replace("\\", "/")
def ignore(assertion):
    """Return True when *assertion* matches a known-ignorable entry."""
    global simpleIgnoreList
    # One-part entries: any substring hit is enough.
    for needle in simpleIgnoreList:
        if needle in assertion:
            return True
    # Two-part entries: both the message half and the (path-normalized)
    # filename half must appear somewhere in the assertion.
    return any(part1 in assertion and part2 in assertion
               for (part1, part2) in twoPartIgnoreList)
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
| Make known_assertions.txt cross-machine and hopefully also cross-platform. | Make known_assertions.txt cross-machine and hopefully also cross-platform.
| Python | mpl-2.0 | nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz | #!/usr/bin/env python
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global ignoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
ignoreList.append(line)
def ignore(assertion):
global ignoreList
for ig in ignoreList:
if (assertion.find(ig) != -1):
return True
return False
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
Make known_assertions.txt cross-machine and hopefully also cross-platform. | #!/usr/bin/env python
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
| <commit_before>#!/usr/bin/env python
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global ignoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
ignoreList.append(line)
def ignore(assertion):
global ignoreList
for ig in ignoreList:
if (assertion.find(ig) != -1):
return True
return False
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
<commit_msg>Make known_assertions.txt cross-machine and hopefully also cross-platform.<commit_after> | #!/usr/bin/env python
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
| #!/usr/bin/env python
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global ignoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
ignoreList.append(line)
def ignore(assertion):
global ignoreList
for ig in ignoreList:
if (assertion.find(ig) != -1):
return True
return False
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
Make known_assertions.txt cross-machine and hopefully also cross-platform.#!/usr/bin/env python
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
| <commit_before>#!/usr/bin/env python
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global ignoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
ignoreList.append(line)
def ignore(assertion):
global ignoreList
for ig in ignoreList:
if (assertion.find(ig) != -1):
return True
return False
ignoreList = []
getIgnores()
# print "detect_assertions is ready (ignoring %d assertions)" % len(ignoreList)
<commit_msg>Make known_assertions.txt cross-machine and hopefully also cross-platform.<commit_after>#!/usr/bin/env python
import platform
def amiss(logPrefix):
global ignoreList
foundSomething = False
currentFile = file(logPrefix + "-err", "r")
# map from (assertion message) to (true, if seen in the current file)
seenInCurrentFile = {}
for line in currentFile:
line = line.strip("\x07").rstrip("\n")
if (line.startswith("###!!!") and not (line in seenInCurrentFile)):
seenInCurrentFile[line] = True
if not (ignore(line)):
print line
foundSomething = True
currentFile.close()
return foundSomething
def getIgnores():
global simpleIgnoreList
ignoreFile = open("known_assertions.txt", "r")
for line in ignoreFile:
line = line.strip()
if ((len(line) > 0) and not line.startswith("#")):
mpi = line.find(", file ") # assertions use this format
if (mpi == -1):
mpi = line.find(": file ") # aborts use this format
if (mpi == -1):
simpleIgnoreList.append(line)
else:
twoPartIgnoreList.append((line[:mpi+7], localSlashes(line[mpi+7:])))
def localSlashes(s):
if platform.system() in ('Windows', 'Microsoft'):
return s.replace("\\", "/")
return s
def ignore(assertion):
global simpleIgnoreList
for ig in simpleIgnoreList:
if assertion.find(ig) != -1:
return True
for (part1, part2) in twoPartIgnoreList:
if assertion.find(part1) != -1 and assertion.find(part2) != -1:
return True
return False
simpleIgnoreList = []
twoPartIgnoreList = []
getIgnores()
#print "detect_assertions is ready (ignoring %d strings without filenames and %d strings with filenames)" % (len(simpleIgnoreList), len(twoPartIgnoreList))
|
32ac109aec82210ccfa617b438a844b0f300157c | comics/core/context_processors.py | comics/core/context_processors.py | from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
    """Context processor exposing site-wide settings to every template."""
    context = {
        'site_title': settings.COMICS_SITE_TITLE,
        'site_tagline': settings.COMICS_SITE_TAGLINE,
        'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
    }
    return context
def all_comics(request):
    """Context processor providing every comic, sorted by name.

    Each comic is annotated with the most recent release fetch time and the
    total number of releases.
    """
    all_comics = Comic.objects.sort_by_name()
    all_comics = all_comics.annotate(Max('release__fetched'))
    all_comics = all_comics.annotate(Count('release'))
    return {'all_comics': all_comics}
| from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
    """Context processor exposing site-wide settings to every template.

    'search_enabled' is derived from whether the comics.search app is
    installed, letting templates show or hide the search UI.
    """
    return {
        'site_title': settings.COMICS_SITE_TITLE,
        'site_tagline': settings.COMICS_SITE_TAGLINE,
        'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
        'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
    }
def all_comics(request):
    """Context processor providing every comic, sorted by name, annotated
    with the latest release fetch time and the release count."""
    comics = (Comic.objects.sort_by_name()
              .annotate(Max('release__fetched'))
              .annotate(Count('release')))
    return {'all_comics': comics}
| Add search_enabled to site settings context processor | Add search_enabled to site settings context processor
| Python | agpl-3.0 | jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics | from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
Add search_enabled to site settings context processor | from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
| <commit_before>from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
<commit_msg>Add search_enabled to site settings context processor<commit_after> | from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
| from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
Add search_enabled to site settings context processorfrom django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
| <commit_before>from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
<commit_msg>Add search_enabled to site settings context processor<commit_after>from django.conf import settings
from django.db.models import Count, Max
from comics.core.models import Comic
def site_settings(request):
return {
'site_title': settings.COMICS_SITE_TITLE,
'site_tagline': settings.COMICS_SITE_TAGLINE,
'google_analytics_code': settings.COMICS_GOOGLE_ANALYTICS_CODE,
'search_enabled': 'comics.search' in settings.INSTALLED_APPS,
}
def all_comics(request):
all_comics = Comic.objects.sort_by_name()
all_comics = all_comics.annotate(Max('release__fetched'))
all_comics = all_comics.annotate(Count('release'))
return {'all_comics': all_comics}
|
8ea3ab66f8a8a71a311bc30b3abec8b9ad681a4e | tpt/util/s3util.py | tpt/util/s3util.py | import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
    """Stream the S3 object *key_name* back to the client.

    Raises Http404 when the key does not exist in the configured bucket.
    """
    s3 = boto.connect_s3(
        aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
    # validate=False skips the extra round-trip that checks bucket existence.
    bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
    key = bucket.get_key(key_name)
    if not key:
        logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
            private_settings.AWS_S3_BUCKET))
        raise Http404()
    # boto keys are iterable, so the body is streamed rather than buffered.
    response = StreamingHttpResponse(key)
    if key.content_type:
        response['Content-Type'] = key.content_type
    else:
        # Fall back to text/plain when S3 has no content type recorded.
        response['Content-Type'] = 'text/plain'
    if key.size:
        response['Content-Length'] = key.size
    return response
| import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
    """Stream the S3 object *key_name* back to the client.

    Propagates the object's Etag, Content-Type and Content-Length headers
    when S3 provides them. Raises Http404 when the key does not exist in
    the configured bucket.
    """
    s3 = boto.connect_s3(
        aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
    # validate=False skips the extra round-trip that checks bucket existence.
    bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
    key = bucket.get_key(key_name)
    if not key:
        logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
            private_settings.AWS_S3_BUCKET))
        raise Http404()
    # boto keys are iterable, so the body is streamed rather than buffered.
    response = StreamingHttpResponse(key)
    # Forward the S3 Etag so clients can do conditional requests/caching.
    if key.etag:
        response['Etag'] = key.etag
    if key.content_type:
        response['Content-Type'] = key.content_type
    else:
        # Fall back to text/plain when S3 has no content type recorded.
        response['Content-Type'] = 'text/plain'
    if key.size:
        response['Content-Length'] = key.size
    return response
| Add Etag header to proxy response from s3 | Add Etag header to proxy response from s3
| Python | apache-2.0 | youprofit/rust-ci-1,hansjorg/rust-ci,youprofit/rust-ci-1,hansjorg/rust-ci,youprofit/rust-ci-1,youprofit/rust-ci-1 | import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
Add Etag header to proxy response from s3 | import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.etag:
response['Etag'] = key.etag
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
| <commit_before>import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
<commit_msg>Add Etag header to proxy response from s3<commit_after> | import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.etag:
response['Etag'] = key.etag
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
| import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
Add Etag header to proxy response from s3import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.etag:
response['Etag'] = key.etag
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
| <commit_before>import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
<commit_msg>Add Etag header to proxy response from s3<commit_after>import boto
import logging
from tpt import private_settings
from boto.s3.key import Key
from django.http import StreamingHttpResponse
from django.http import Http404
logger = logging.getLogger(__name__)
def stream_object(key_name):
s3 = boto.connect_s3(
aws_access_key_id = private_settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key = private_settings.AWS_SECRET_ACCESS_KEY)
bucket = s3.get_bucket(private_settings.AWS_S3_BUCKET, validate=False)
key = bucket.get_key(key_name)
if not key:
logger.warn('Unable to find key "{}" in bucket "{}"'.format(key_name,
private_settings.AWS_S3_BUCKET))
raise Http404()
response = StreamingHttpResponse(key)
if key.etag:
response['Etag'] = key.etag
if key.content_type:
response['Content-Type'] = key.content_type
else:
response['Content-Type'] = 'text/plain'
if key.size:
response['Content-Length'] = key.size
return response
|
1edd6ee6b71b3f3ac9654cc47804592613dd61ec | clowder/clowder/cli/init_controller.py | clowder/clowder/cli/init_controller.py | from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class InitController(AbstractBaseController):
    """Cement controller stub for the ``clowder init`` subcommand."""

    class Meta:
        # Cement wiring: exposed as `clowder init` nested under the base app.
        label = 'init'
        stacked_on = 'base'
        stacked_type = 'nested'
        description = 'Clone repository to clowder directory and create clowder.yaml symlink'
        arguments = [
            (['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
            (['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
        ]

    @expose(help="second-controller default command", hide=True)
    def default(self):
        # Placeholder body; the real init logic is not implemented yet.
        print("Inside SecondController.default()")
| import sys
from cement.ext.ext_argparse import expose
from termcolor import colored, cprint
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import network_connection_required
class InitController(AbstractBaseController):
    """Cement controller implementing the ``clowder init`` subcommand."""

    class Meta:
        # Cement wiring: exposed as `clowder init` nested under the base app.
        label = 'init'
        stacked_on = 'base'
        stacked_type = 'nested'
        description = 'Clone repository to clowder directory and create clowder.yaml symlink'
        arguments = [
            (['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
            (['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
        ]

    @expose(help="second-controller default command", hide=True)
    @network_connection_required
    def default(self):
        """Clone the clowder repo from the given URL (and optional branch)."""
        # Refuse to re-initialize a directory that already has a clowder repo.
        if self.clowder_repo:
            cprint('Clowder already initialized in this directory\n', 'red')
            sys.exit(1)
        url_output = colored(self.app.pargs.url, 'green')
        print('Create clowder repo from ' + url_output + '\n')
        # Default to 'master' when no --branch was supplied (nargs=1 gives a list).
        if self.app.pargs.branch is None:
            branch = 'master'
        else:
            branch = str(self.app.pargs.branch[0])
        # NOTE(review): the early-exit above runs when self.clowder_repo is
        # truthy, yet .init() is called on it here -- presumably the base
        # controller constructs the repo object lazily; confirm this cannot
        # be None on the fall-through path.
        self.clowder_repo.init(self.app.pargs.url, branch)
| Add `clowder init` logic to Cement controller | Add `clowder init` logic to Cement controller
| Python | mit | JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder | from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
Add `clowder init` logic to Cement controller | import sys
from cement.ext.ext_argparse import expose
from termcolor import colored, cprint
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import network_connection_required
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@network_connection_required
def default(self):
if self.clowder_repo:
cprint('Clowder already initialized in this directory\n', 'red')
sys.exit(1)
url_output = colored(self.app.pargs.url, 'green')
print('Create clowder repo from ' + url_output + '\n')
if self.app.pargs.branch is None:
branch = 'master'
else:
branch = str(self.app.pargs.branch[0])
self.clowder_repo.init(self.app.pargs.url, branch)
| <commit_before>from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
<commit_msg>Add `clowder init` logic to Cement controller<commit_after> | import sys
from cement.ext.ext_argparse import expose
from termcolor import colored, cprint
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import network_connection_required
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@network_connection_required
def default(self):
if self.clowder_repo:
cprint('Clowder already initialized in this directory\n', 'red')
sys.exit(1)
url_output = colored(self.app.pargs.url, 'green')
print('Create clowder repo from ' + url_output + '\n')
if self.app.pargs.branch is None:
branch = 'master'
else:
branch = str(self.app.pargs.branch[0])
self.clowder_repo.init(self.app.pargs.url, branch)
| from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
Add `clowder init` logic to Cement controllerimport sys
from cement.ext.ext_argparse import expose
from termcolor import colored, cprint
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import network_connection_required
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@network_connection_required
def default(self):
if self.clowder_repo:
cprint('Clowder already initialized in this directory\n', 'red')
sys.exit(1)
url_output = colored(self.app.pargs.url, 'green')
print('Create clowder repo from ' + url_output + '\n')
if self.app.pargs.branch is None:
branch = 'master'
else:
branch = str(self.app.pargs.branch[0])
self.clowder_repo.init(self.app.pargs.url, branch)
| <commit_before>from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
<commit_msg>Add `clowder init` logic to Cement controller<commit_after>import sys
from cement.ext.ext_argparse import expose
from termcolor import colored, cprint
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import network_connection_required
class InitController(AbstractBaseController):
class Meta:
label = 'init'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Clone repository to clowder directory and create clowder.yaml symlink'
arguments = [
(['url'], dict(metavar='URL', help='url of repo containing clowder.yaml')),
(['--branch', '-b'], dict(nargs=1, metavar='BRANCH', help='branch of repo containing clowder.yaml'))
]
@expose(help="second-controller default command", hide=True)
@network_connection_required
def default(self):
if self.clowder_repo:
cprint('Clowder already initialized in this directory\n', 'red')
sys.exit(1)
url_output = colored(self.app.pargs.url, 'green')
print('Create clowder repo from ' + url_output + '\n')
if self.app.pargs.branch is None:
branch = 'master'
else:
branch = str(self.app.pargs.branch[0])
self.clowder_repo.init(self.app.pargs.url, branch)
|
8846747540b54b47e97a06471cd3daedc3a28f47 | modules/pipeurlbuilder.py | modules/pipeurlbuilder.py | # pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
| # pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
| Remove trailing slash (google charts don't like it) | Remove trailing slash (google charts don't like it)
| Python | mit | nerevu/riko,nerevu/riko | # pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
Remove trailing slash (google charts don't like it) | # pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
| <commit_before># pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
<commit_msg>Remove trailing slash (google charts don't like it)<commit_after> | # pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
| # pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
Remove trailing slash (google charts don't like it)# pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
| <commit_before># pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
<commit_msg>Remove trailing slash (google charts don't like it)<commit_after># pipeurlbuilder.py
#
import urllib
from pipe2py import util
def pipe_urlbuilder(context, _INPUT, conf, **kwargs):
"""This source builds a url and yields it forever.
Keyword arguments:
context -- pipeline context
_INPUT -- not used
conf:
BASE -- base
PATH -- path elements
PARAM -- query parameters
Yields (_OUTPUT):
url
"""
for item in _INPUT:
#note: we could cache get_value results if item==True
url = util.get_value(conf['BASE'], item, **kwargs)
if not url.endswith('/'):
url += '/'
path = util.get_value(conf['PATH'], item, **kwargs)
if not isinstance(path, list):
path = [path]
url += "/".join(path)
url = url.rstrip("/")
params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']])
if params:
url += "?" + urllib.urlencode(params)
yield url
|
9532a28dacefec67ea67f94cf992a505d8a6629d | utilities/ticker-update.py | utilities/ticker-update.py | import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
| import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
| Fix file read, start on sell price | Fix file read, start on sell price | Python | mit | daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various | import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
Fix file read, start on sell price | import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
| <commit_before>import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
<commit_msg>Fix file read, start on sell price<commit_after> | import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
| import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
Fix file read, start on sell priceimport requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
| <commit_before>import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
<commit_msg>Fix file read, start on sell price<commit_after>import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
|
06ec5baaa799836c656f67b083b77197943d97f2 | drogher/__init__.py | drogher/__init__.py | from . import shippers
def barcode(b):
for klass in ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
| from . import shippers
def barcode(b, barcode_classes=None):
if barcode_classes is None:
barcode_classes = ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']
for klass in barcode_classes:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
| Allow barcode classes to be optionally specified | Allow barcode classes to be optionally specified
| Python | bsd-3-clause | jbittel/drogher | from . import shippers
def barcode(b):
for klass in ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
Allow barcode classes to be optionally specified | from . import shippers
def barcode(b, barcode_classes=None):
if barcode_classes is None:
barcode_classes = ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']
for klass in barcode_classes:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
| <commit_before>from . import shippers
def barcode(b):
for klass in ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
<commit_msg>Allow barcode classes to be optionally specified<commit_after> | from . import shippers
def barcode(b, barcode_classes=None):
if barcode_classes is None:
barcode_classes = ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']
for klass in barcode_classes:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
| from . import shippers
def barcode(b):
for klass in ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
Allow barcode classes to be optionally specifiedfrom . import shippers
def barcode(b, barcode_classes=None):
if barcode_classes is None:
barcode_classes = ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']
for klass in barcode_classes:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
| <commit_before>from . import shippers
def barcode(b):
for klass in ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
<commit_msg>Allow barcode classes to be optionally specified<commit_after>from . import shippers
def barcode(b, barcode_classes=None):
if barcode_classes is None:
barcode_classes = ['DHL', 'FedExExpress', 'FedExGround96', 'UPS', 'USPSIMpb', 'USPS13']
for klass in barcode_classes:
shipper = getattr(shippers, klass)(b)
if shipper.is_valid:
return shipper
return shippers.Unknown(b)
|
6f16efcce43683868fde945ce59d87311f81a87c | virtool/downloads/utils.py | virtool/downloads/utils.py | """
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
| """
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
def format_subtraction_filename(subtraction_id: str, subtraction_name: str):
"""
Format a subtraction filename of the form "subtraction-subtraction_id-subtraction_name.fa.gz".
:param subtraction_id: the subtraction id
:param subtraction_name: the subtraction name
:return: a compound subtraction filename
"""
name = subtraction_name.replace(" ", "-").lower()
return f"subtraction-{subtraction_id}-{name}.fa.gz"
| Add function to format subtraction filename | Add function to format subtraction filename
| Python | mit | igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool | """
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
Add function to format subtraction filename | """
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
def format_subtraction_filename(subtraction_id: str, subtraction_name: str):
"""
Format a subtraction filename of the form "subtraction-subtraction_id-subtraction_name.fa.gz".
:param subtraction_id: the subtraction id
:param subtraction_name: the subtraction name
:return: a compound subtraction filename
"""
name = subtraction_name.replace(" ", "-").lower()
return f"subtraction-{subtraction_id}-{name}.fa.gz"
| <commit_before>"""
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
<commit_msg>Add function to format subtraction filename<commit_after> | """
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
def format_subtraction_filename(subtraction_id: str, subtraction_name: str):
"""
Format a subtraction filename of the form "subtraction-subtraction_id-subtraction_name.fa.gz".
:param subtraction_id: the subtraction id
:param subtraction_name: the subtraction name
:return: a compound subtraction filename
"""
name = subtraction_name.replace(" ", "-").lower()
return f"subtraction-{subtraction_id}-{name}.fa.gz"
| """
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
Add function to format subtraction filename"""
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
def format_subtraction_filename(subtraction_id: str, subtraction_name: str):
"""
Format a subtraction filename of the form "subtraction-subtraction_id-subtraction_name.fa.gz".
:param subtraction_id: the subtraction id
:param subtraction_name: the subtraction name
:return: a compound subtraction filename
"""
name = subtraction_name.replace(" ", "-").lower()
return f"subtraction-{subtraction_id}-{name}.fa.gz"
| <commit_before>"""
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
<commit_msg>Add function to format subtraction filename<commit_after>"""
Utilities focussing on formatting FASTA files.
"""
def format_fasta_entry(otu_name: str, isolate_name: str, sequence_id: str, sequence: str) -> str:
"""
Create a FASTA header and sequence block for a sequence in a otu DNA FASTA file downloadable from Virtool.
:param otu_name: the otu name to include in the header
:param isolate_name: the isolate name to include in the header
:param sequence_id: the sequence id to include in the header
:param sequence: the sequence for the FASTA entry
:return: a FASTA entry
"""
return f">{otu_name}|{isolate_name}|{sequence_id}|{len(sequence)}\n{sequence}"
def format_fasta_filename(*args) -> str:
"""
Format a FASTA filename of the form "otu.isolate.sequence_id.fa".
:param args: the filename parts
:return: a compound FASTA filename
"""
if len(args) > 3:
raise ValueError("Unexpected number of filename parts")
if len(args) == 0:
raise ValueError("At least one filename part required")
filename = ".".join(args).replace(" ", "_") + ".fa"
return filename.lower()
def format_subtraction_filename(subtraction_id: str, subtraction_name: str):
"""
Format a subtraction filename of the form "subtraction-subtraction_id-subtraction_name.fa.gz".
:param subtraction_id: the subtraction id
:param subtraction_name: the subtraction name
:return: a compound subtraction filename
"""
name = subtraction_name.replace(" ", "-").lower()
return f"subtraction-{subtraction_id}-{name}.fa.gz"
|
610a1b000fd9de6e7e4c994e10c30c1aa9acbb82 | csunplugged/utils/check_glossary_links.py | csunplugged/utils/check_glossary_links.py | """Module for checking glossary links found within Markdown conversions."""
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except ObjectDoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
| """Module for checking glossary links found within Markdown conversions."""
from django.core.exceptions import DoesNotExist
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except DoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
| Add import for Django exception | Add import for Django exception
| Python | mit | uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged | """Module for checking glossary links found within Markdown conversions."""
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except ObjectDoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
Add import for Django exception | """Module for checking glossary links found within Markdown conversions."""
from django.core.exceptions import DoesNotExist
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except DoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
| <commit_before>"""Module for checking glossary links found within Markdown conversions."""
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except ObjectDoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
<commit_msg>Add import for Django exception<commit_after> | """Module for checking glossary links found within Markdown conversions."""
from django.core.exceptions import DoesNotExist
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except DoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
| """Module for checking glossary links found within Markdown conversions."""
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except ObjectDoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
Add import for Django exception"""Module for checking glossary links found within Markdown conversions."""
from django.core.exceptions import DoesNotExist
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except DoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
| <commit_before>"""Module for checking glossary links found within Markdown conversions."""
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except ObjectDoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
<commit_msg>Add import for Django exception<commit_after>"""Module for checking glossary links found within Markdown conversions."""
from django.core.exceptions import DoesNotExist
from utils.errors.CouldNotFindGlossaryTerm import CouldNotFindGlossaryTerm
from topics.models import GlossaryTerm
def check_converter_glossary_links(glossary_links, md_file_path):
"""Process glossary links found by Markdown converter.
Args:
glossary_links: Dictionary of glossary links (dict).
"""
for term in glossary_links.keys():
try:
GlossaryTerm.objects.get(slug=term)
except DoesNotExist:
raise CouldNotFindGlossaryTerm(term, md_file_path)
|
71c5cddd3c9e459da7ceb71942dbc95abbfac518 | tvecs/logger/init_logger.py | tvecs/logger/init_logger.py | """Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""Initialise the logger based on user preference."""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""Set Logging Level for specified logger."""
logger.setLevel(level)
def set_logger_verbose(logger):
"""Set Logger to verbose level."""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""Set Logger to silent level."""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""Set Logger to info level."""
set_logger_level(logging.INFO, logger)
| """Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""
Initialise the logger based on user preference.
API Documentation
:param handler_name: Handler name specified for logger.
:type handler_name: :class:`String`
:rtype: :class:`logging.Logger`
.. seealso::
* :mod:`logging`
"""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""
Set Logging Level for specified logger.
API Documentation
:param level: Minimum level specified which can be logged.
:param logger: Logger for which the level should be specified.
:type level: :class:`Integer`
:type logger: :class:`logging.Logger`
"""
logger.setLevel(level)
def set_logger_verbose(logger):
"""
Set Logger to verbose level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""
Set Logger to silent level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""
Set Logger to info level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.INFO, logger)
| Add Docstring for public modules | Logger: Add Docstring for public modules
| Python | mit | KshitijKarthick/tvecs,KshitijKarthick/tvecs,KshitijKarthick/tvecs | """Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""Initialise the logger based on user preference."""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""Set Logging Level for specified logger."""
logger.setLevel(level)
def set_logger_verbose(logger):
"""Set Logger to verbose level."""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""Set Logger to silent level."""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""Set Logger to info level."""
set_logger_level(logging.INFO, logger)
Logger: Add Docstring for public modules | """Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""
Initialise the logger based on user preference.
API Documentation
:param handler_name: Handler name specified for logger.
:type handler_name: :class:`String`
:rtype: :class:`logging.Logger`
.. seealso::
* :mod:`logging`
"""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""
Set Logging Level for specified logger.
API Documentation
:param level: Minimum level specified which can be logged.
:param logger: Logger for which the level should be specified.
:type level: :class:`Integer`
:type logger: :class:`logging.Logger`
"""
logger.setLevel(level)
def set_logger_verbose(logger):
"""
Set Logger to verbose level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""
Set Logger to silent level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""
Set Logger to info level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.INFO, logger)
| <commit_before>"""Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""Initialise the logger based on user preference."""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""Set Logging Level for specified logger."""
logger.setLevel(level)
def set_logger_verbose(logger):
"""Set Logger to verbose level."""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""Set Logger to silent level."""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""Set Logger to info level."""
set_logger_level(logging.INFO, logger)
<commit_msg>Logger: Add Docstring for public modules<commit_after> | """Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""
Initialise the logger based on user preference.
API Documentation
:param handler_name: Handler name specified for logger.
:type handler_name: :class:`String`
:rtype: :class:`logging.Logger`
.. seealso::
* :mod:`logging`
"""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""
Set Logging Level for specified logger.
API Documentation
:param level: Minimum level specified which can be logged.
:param logger: Logger for which the level should be specified.
:type level: :class:`Integer`
:type logger: :class:`logging.Logger`
"""
logger.setLevel(level)
def set_logger_verbose(logger):
"""
Set Logger to verbose level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""
Set Logger to silent level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""
Set Logger to info level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.INFO, logger)
| """Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""Initialise the logger based on user preference."""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""Set Logging Level for specified logger."""
logger.setLevel(level)
def set_logger_verbose(logger):
"""Set Logger to verbose level."""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""Set Logger to silent level."""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""Set Logger to info level."""
set_logger_level(logging.INFO, logger)
Logger: Add Docstring for public modules"""Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""
Initialise the logger based on user preference.
API Documentation
:param handler_name: Handler name specified for logger.
:type handler_name: :class:`String`
:rtype: :class:`logging.Logger`
.. seealso::
* :mod:`logging`
"""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""
Set Logging Level for specified logger.
API Documentation
:param level: Minimum level specified which can be logged.
:param logger: Logger for which the level should be specified.
:type level: :class:`Integer`
:type logger: :class:`logging.Logger`
"""
logger.setLevel(level)
def set_logger_verbose(logger):
"""
Set Logger to verbose level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""
Set Logger to silent level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""
Set Logger to info level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.INFO, logger)
| <commit_before>"""Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""Initialise the logger based on user preference."""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""Set Logging Level for specified logger."""
logger.setLevel(level)
def set_logger_verbose(logger):
"""Set Logger to verbose level."""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""Set Logger to silent level."""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""Set Logger to info level."""
set_logger_level(logging.INFO, logger)
<commit_msg>Logger: Add Docstring for public modules<commit_after>"""Initialise logging functionality."""
import sys
import logging
def initialise(handler_name):
"""
Initialise the logger based on user preference.
API Documentation
:param handler_name: Handler name specified for logger.
:type handler_name: :class:`String`
:rtype: :class:`logging.Logger`
.. seealso::
* :mod:`logging`
"""
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s-%(levelname)s-%(name)s-%(message)s'
)
logger = logging.getLogger(handler_name)
return logger
def set_logger_level(level, logger):
"""
Set Logging Level for specified logger.
API Documentation
:param level: Minimum level specified which can be logged.
:param logger: Logger for which the level should be specified.
:type level: :class:`Integer`
:type logger: :class:`logging.Logger`
"""
logger.setLevel(level)
def set_logger_verbose(logger):
"""
Set Logger to verbose level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.DEBUG, logger)
def set_logger_silent(logger):
"""
Set Logger to silent level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.ERROR, logger)
def set_logger_normal(logger):
"""
Set Logger to info level.
API Documentation
:param logger: Logger for which the level should be specified.
:type logger: :class:`logging.Logger`
"""
set_logger_level(logging.INFO, logger)
|
bacff0ca6cae1f7488853f565b3030eb49ebad01 | cherryontop/decorators/qp.py | cherryontop/decorators/qp.py | import functools
import inspect
import cherrypy
from cherrypy.lib.httputil import parse_query_string
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
dynamic_url_args = _positional_arg_names(f)
if set(allowed) & set(dynamic_url_args): # disjoint?
msg = 'query param and dynamic url component names cannot match'
raise ValueError(msg)
@functools.wraps(f)
def wrapped(*args, **kwargs):
query_params = parse_query_string(cherrypy.request.query_string)
# no param names conflict with dynamic url component names
for param in query_params:
if param in dynamic_url_args:
raise UnexpectedParameter(param)
# all supplied parameters allowed?
for param in query_params:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param, cast in cast_funcs:
if param in query_params:
try:
query_params[param] = cast(query_params[param])
except ValueError:
raise InvalidParameter(param)
kwargs.update(query_params)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
def _positional_arg_names(f):
spec = inspect.getargspec(f)
args = spec.args
num_positional = len(args) - len(spec.defaults)
return args[:num_positional]
| import functools
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
# all supplied parameters allowed?
for param in kwargs:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param_name, cast in cast_funcs:
if param_name in kwargs:
try:
kwargs[param_name] = cast(kwargs[param_name])
except ValueError:
raise InvalidParameter(param_name)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
| Revert "confirm query params never overwrite dynamic url components" | Revert "confirm query params never overwrite dynamic url components"
This reverts commit 9aa3a57a289985d24877515995b3f1d589624a8d.
Conflicts:
cherryontop/decorators/qp.py
| Python | bsd-3-clause | csira/cherryontop | import functools
import inspect
import cherrypy
from cherrypy.lib.httputil import parse_query_string
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
dynamic_url_args = _positional_arg_names(f)
if set(allowed) & set(dynamic_url_args): # disjoint?
msg = 'query param and dynamic url component names cannot match'
raise ValueError(msg)
@functools.wraps(f)
def wrapped(*args, **kwargs):
query_params = parse_query_string(cherrypy.request.query_string)
# no param names conflict with dynamic url component names
for param in query_params:
if param in dynamic_url_args:
raise UnexpectedParameter(param)
# all supplied parameters allowed?
for param in query_params:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param, cast in cast_funcs:
if param in query_params:
try:
query_params[param] = cast(query_params[param])
except ValueError:
raise InvalidParameter(param)
kwargs.update(query_params)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
def _positional_arg_names(f):
spec = inspect.getargspec(f)
args = spec.args
num_positional = len(args) - len(spec.defaults)
return args[:num_positional]
Revert "confirm query params never overwrite dynamic url components"
This reverts commit 9aa3a57a289985d24877515995b3f1d589624a8d.
Conflicts:
cherryontop/decorators/qp.py | import functools
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
# all supplied parameters allowed?
for param in kwargs:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param_name, cast in cast_funcs:
if param_name in kwargs:
try:
kwargs[param_name] = cast(kwargs[param_name])
except ValueError:
raise InvalidParameter(param_name)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
| <commit_before>import functools
import inspect
import cherrypy
from cherrypy.lib.httputil import parse_query_string
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
dynamic_url_args = _positional_arg_names(f)
if set(allowed) & set(dynamic_url_args): # disjoint?
msg = 'query param and dynamic url component names cannot match'
raise ValueError(msg)
@functools.wraps(f)
def wrapped(*args, **kwargs):
query_params = parse_query_string(cherrypy.request.query_string)
# no param names conflict with dynamic url component names
for param in query_params:
if param in dynamic_url_args:
raise UnexpectedParameter(param)
# all supplied parameters allowed?
for param in query_params:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param, cast in cast_funcs:
if param in query_params:
try:
query_params[param] = cast(query_params[param])
except ValueError:
raise InvalidParameter(param)
kwargs.update(query_params)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
def _positional_arg_names(f):
spec = inspect.getargspec(f)
args = spec.args
num_positional = len(args) - len(spec.defaults)
return args[:num_positional]
<commit_msg>Revert "confirm query params never overwrite dynamic url components"
This reverts commit 9aa3a57a289985d24877515995b3f1d589624a8d.
Conflicts:
cherryontop/decorators/qp.py<commit_after> | import functools
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
# all supplied parameters allowed?
for param in kwargs:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param_name, cast in cast_funcs:
if param_name in kwargs:
try:
kwargs[param_name] = cast(kwargs[param_name])
except ValueError:
raise InvalidParameter(param_name)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
| import functools
import inspect
import cherrypy
from cherrypy.lib.httputil import parse_query_string
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
allowed, cast_funcs = _get_checks(*a, **kw)
def wrap(f):
dynamic_url_args = _positional_arg_names(f)
if set(allowed) & set(dynamic_url_args): # disjoint?
msg = 'query param and dynamic url component names cannot match'
raise ValueError(msg)
@functools.wraps(f)
def wrapped(*args, **kwargs):
query_params = parse_query_string(cherrypy.request.query_string)
# no param names conflict with dynamic url component names
for param in query_params:
if param in dynamic_url_args:
raise UnexpectedParameter(param)
# all supplied parameters allowed?
for param in query_params:
if param not in allowed:
raise UnexpectedParameter(param)
# typecast params
for param, cast in cast_funcs:
if param in query_params:
try:
query_params[param] = cast(query_params[param])
except ValueError:
raise InvalidParameter(param)
kwargs.update(query_params)
return f(*args, **kwargs)
return wrapped
return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
def _positional_arg_names(f):
spec = inspect.getargspec(f)
args = spec.args
num_positional = len(args) - len(spec.defaults)
return args[:num_positional]
Revert "confirm query params never overwrite dynamic url components"
This reverts commit 9aa3a57a289985d24877515995b3f1d589624a8d.
Conflicts:
cherryontop/decorators/qp.pyimport functools
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
    """Decorator: reject unknown keyword params and typecast known ones.

    Positional args are (param_name, cast_callable) pairs; the optional
    'allow' keyword lists names that are permitted without a cast.
    """
    allowed, cast_funcs = _get_checks(*a, **kw)

    def wrap(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            # Every supplied keyword must have been declared up front.
            for name in kwargs:
                if name not in allowed:
                    raise UnexpectedParameter(name)
            # Apply each registered cast to the matching supplied value.
            for name, cast in cast_funcs:
                if name not in kwargs:
                    continue
                try:
                    kwargs[name] = cast(kwargs[name])
                except ValueError:
                    raise InvalidParameter(name)
            return f(*args, **kwargs)
        return wrapped
    return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
| <commit_before>import functools
import inspect
import cherrypy
from cherrypy.lib.httputil import parse_query_string
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
    """Decorator: validate and typecast cherrypy query-string parameters.

    Parameters are read from the raw query string at call time.  A request
    is rejected when a param name shadows one of the handler's dynamic url
    components or is not in the allowed set; registered cast functions are
    applied to the surviving values before they reach the handler.
    """
    allowed, cast_funcs = _get_checks(*a, **kw)

    def wrap(f):
        url_components = _positional_arg_names(f)
        # Fail fast at decoration time on ambiguous configuration.
        if set(allowed) & set(url_components):
            msg = 'query param and dynamic url component names cannot match'
            raise ValueError(msg)

        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            params = parse_query_string(cherrypy.request.query_string)
            # A query param must never overwrite a dynamic url component.
            for name in params:
                if name in url_components:
                    raise UnexpectedParameter(name)
            # Every supplied param has to be explicitly allowed.
            for name in params:
                if name not in allowed:
                    raise UnexpectedParameter(name)
            # Typecast whatever the caller registered a cast for.
            for name, cast in cast_funcs:
                if name not in params:
                    continue
                try:
                    params[name] = cast(params[name])
                except ValueError:
                    raise InvalidParameter(name)
            kwargs.update(params)
            return f(*args, **kwargs)
        return wrapped
    return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
def _positional_arg_names(f):
spec = inspect.getargspec(f)
args = spec.args
num_positional = len(args) - len(spec.defaults)
return args[:num_positional]
<commit_msg>Revert "confirm query params never overwrite dynamic url components"
This reverts commit 9aa3a57a289985d24877515995b3f1d589624a8d.
Conflicts:
cherryontop/decorators/qp.py<commit_after>import functools
from cherryontop.errors import InvalidParameter, UnexpectedParameter
def typecast_query_params(*a, **kw):
    """Decorator: reject unknown keyword params and typecast known ones.

    Positional args are (param_name, cast_callable) pairs; the optional
    'allow' keyword lists names that are permitted without a cast.
    """
    allowed, cast_funcs = _get_checks(*a, **kw)

    def wrap(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            # Every supplied keyword must have been declared up front.
            for name in kwargs:
                if name not in allowed:
                    raise UnexpectedParameter(name)
            # Apply each registered cast to the matching supplied value.
            for name, cast in cast_funcs:
                if name not in kwargs:
                    continue
                try:
                    kwargs[name] = cast(kwargs[name])
                except ValueError:
                    raise InvalidParameter(name)
            return f(*args, **kwargs)
        return wrapped
    return wrap
def _get_checks(*args, **kwargs):
allowed = kwargs.pop('allow', [])
allowed = set(allowed)
to_cast = []
for caster in args:
param_name, func = caster
if not callable(func):
raise TypeError('cast func must be callable')
allowed.add(param_name)
to_cast.append(caster)
return allowed, to_cast
|
88f00611ea000d0fed984e93aaa661db2c2bd79e | contrib/tempest/tempest/exceptions/share_exceptions.py | contrib/tempest/tempest/exceptions/share_exceptions.py | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
| # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class SnapshotBuildErrorException(base.TempestException):
message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
| Add exception to tempest plugin | Add exception to tempest plugin
Exception 'SnapshotBuildErrorException' was used, but
was not defined.
Change-Id: Ida7554d65eb6657fa05b7d53cbfa452cc0239f74
| Python | apache-2.0 | bswartz/manila,openstack/manila,vponomaryov/manila,weiting-chen/manila,jcsp/manila,scality/manila,NetApp/manila,NetApp/manila,openstack/manila,sajuptpm/manila,vponomaryov/manila,redhat-openstack/manila,sajuptpm/manila,jcsp/manila,bswartz/manila,redhat-openstack/manila,scality/manila,weiting-chen/manila | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
Add exception to tempest plugin
Exception 'SnapshotBuildErrorException' was used, but
was not defined.
Change-Id: Ida7554d65eb6657fa05b7d53cbfa452cc0239f74 | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class SnapshotBuildErrorException(base.TempestException):
message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
| <commit_before># Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
<commit_msg>Add exception to tempest plugin
Exception 'SnapshotBuildErrorException' was used, but
was not defined.
Change-Id: Ida7554d65eb6657fa05b7d53cbfa452cc0239f74<commit_after> | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class SnapshotBuildErrorException(base.TempestException):
message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
| # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
Add exception to tempest plugin
Exception 'SnapshotBuildErrorException' was used, but
was not defined.
Change-Id: Ida7554d65eb6657fa05b7d53cbfa452cc0239f74# Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class SnapshotBuildErrorException(base.TempestException):
message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
| <commit_before># Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
<commit_msg>Add exception to tempest plugin
Exception 'SnapshotBuildErrorException' was used, but
was not defined.
Change-Id: Ida7554d65eb6657fa05b7d53cbfa452cc0239f74<commit_after># Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.exceptions import base
class ShareBuildErrorException(base.TempestException):
message = "Share %(share_id)s failed to build and is in ERROR status"
class AccessRuleBuildErrorException(base.TempestException):
message = "Share's rule with id %(rule_id) is in ERROR status"
class SnapshotBuildErrorException(base.TempestException):
message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status"
class ShareProtocolNotSpecified(base.TempestException):
message = "Share can not be created, share protocol is not specified"
class ShareNetworkNotSpecified(base.TempestException):
message = "Share can not be created, share network not specified"
class NoAvailableNetwork(base.TempestException):
message = "No available network for service VM"
|
17cf285748ee519c6d28971baefbf4ed506fac1e | water_level/water_level.py | water_level/water_level.py | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
[3 2 3] -> 1
'''
if __name__ == '__main__':
pass | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
An integer array defines the height of a 2D set of columns. After it rains enough amount of water,
how much water will be contained in the valleys formed by these mountains?
Ex: [3 2 3]
X X X W X
X X X -> X X X -> 1
X X X X X X
'''
def water_level(a):
if not a:
raise "Array cannot be empty"
water = 0
leftIndex, rightIndex = 0, len(a) - 1
left, right = a[0], a[-1]
while leftIndex <= rightIndex:
if left <= right:
water += max(left - a[leftIndex], 0)
left = max(left, a[leftIndex])
leftIndex += 1
else:
water += max(right - a[rightIndex], 0)
right = max(right, a[rightIndex])
rightIndex -= 1
return water
if __name__ == '__main__':
assert(water_level([3, 2, 3]) == 1)
assert(water_level([1, 2, 3, 4]) == 0)
assert(water_level([5, 1, 3, 4]) == 4)
assert(water_level([2, 1, 4, 3, 6]) == 2)
print "Successful" | Include initial solution for the water level problem problem | Include initial solution for the water level problem problem
| Python | mit | alkaitz/general-programming | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
[3 2 3] -> 1
'''
if __name__ == '__main__':
passInclude initial solution for the water level problem problem | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
An integer array defines the height of a 2D set of columns. After it rains enough amount of water,
how much water will be contained in the valleys formed by these mountains?
Ex: [3 2 3]
X X X W X
X X X -> X X X -> 1
X X X X X X
'''
def water_level(a):
if not a:
raise "Array cannot be empty"
water = 0
leftIndex, rightIndex = 0, len(a) - 1
left, right = a[0], a[-1]
while leftIndex <= rightIndex:
if left <= right:
water += max(left - a[leftIndex], 0)
left = max(left, a[leftIndex])
leftIndex += 1
else:
water += max(right - a[rightIndex], 0)
right = max(right, a[rightIndex])
rightIndex -= 1
return water
if __name__ == '__main__':
assert(water_level([3, 2, 3]) == 1)
assert(water_level([1, 2, 3, 4]) == 0)
assert(water_level([5, 1, 3, 4]) == 4)
assert(water_level([2, 1, 4, 3, 6]) == 2)
print "Successful" | <commit_before>'''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
[3 2 3] -> 1
'''
if __name__ == '__main__':
pass<commit_msg>Include initial solution for the water level problem problem<commit_after> | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
An integer array defines the height of a 2D set of columns. After it rains enough amount of water,
how much water will be contained in the valleys formed by these mountains?
Ex: [3 2 3]
X X X W X
X X X -> X X X -> 1
X X X X X X
'''
def water_level(a):
if not a:
raise "Array cannot be empty"
water = 0
leftIndex, rightIndex = 0, len(a) - 1
left, right = a[0], a[-1]
while leftIndex <= rightIndex:
if left <= right:
water += max(left - a[leftIndex], 0)
left = max(left, a[leftIndex])
leftIndex += 1
else:
water += max(right - a[rightIndex], 0)
right = max(right, a[rightIndex])
rightIndex -= 1
return water
if __name__ == '__main__':
assert(water_level([3, 2, 3]) == 1)
assert(water_level([1, 2, 3, 4]) == 0)
assert(water_level([5, 1, 3, 4]) == 4)
assert(water_level([2, 1, 4, 3, 6]) == 2)
print "Successful" | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
[3 2 3] -> 1
'''
if __name__ == '__main__':
passInclude initial solution for the water level problem problem'''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
An integer array defines the height of a 2D set of columns. After it rains enough amount of water,
how much water will be contained in the valleys formed by these mountains?
Ex: [3 2 3]
X X X W X
X X X -> X X X -> 1
X X X X X X
'''
def water_level(a):
if not a:
raise "Array cannot be empty"
water = 0
leftIndex, rightIndex = 0, len(a) - 1
left, right = a[0], a[-1]
while leftIndex <= rightIndex:
if left <= right:
water += max(left - a[leftIndex], 0)
left = max(left, a[leftIndex])
leftIndex += 1
else:
water += max(right - a[rightIndex], 0)
right = max(right, a[rightIndex])
rightIndex -= 1
return water
if __name__ == '__main__':
assert(water_level([3, 2, 3]) == 1)
assert(water_level([1, 2, 3, 4]) == 0)
assert(water_level([5, 1, 3, 4]) == 4)
assert(water_level([2, 1, 4, 3, 6]) == 2)
print "Successful" | <commit_before>'''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
[3 2 3] -> 1
'''
if __name__ == '__main__':
pass<commit_msg>Include initial solution for the water level problem problem<commit_after>'''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
An integer array defines the height of a 2D set of columns. After it rains enough amount of water,
how much water will be contained in the valleys formed by these mountains?
Ex: [3 2 3]
X X X W X
X X X -> X X X -> 1
X X X X X X
'''
def water_level(a):
if not a:
raise "Array cannot be empty"
water = 0
leftIndex, rightIndex = 0, len(a) - 1
left, right = a[0], a[-1]
while leftIndex <= rightIndex:
if left <= right:
water += max(left - a[leftIndex], 0)
left = max(left, a[leftIndex])
leftIndex += 1
else:
water += max(right - a[rightIndex], 0)
right = max(right, a[rightIndex])
rightIndex -= 1
return water
if __name__ == '__main__':
assert(water_level([3, 2, 3]) == 1)
assert(water_level([1, 2, 3, 4]) == 0)
assert(water_level([5, 1, 3, 4]) == 4)
assert(water_level([2, 1, 4, 3, 6]) == 2)
print "Successful" |
d3f5e0e2d6104963237a0626d608cc1b0949b762 | zounds/learn/functional.py | zounds/learn/functional.py | import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
| import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
| Add an option to also return intermediate example norms | Add an option to also return intermediate example norms
| Python | mit | JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds | import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
Add an option to also return intermediate example norms | import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
| <commit_before>import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
<commit_msg>Add an option to also return intermediate example norms<commit_after> | import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
| import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
Add an option to also return intermediate example normsimport numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
| <commit_before>import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x):
original_shape = x.shape
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
return normed.reshape(original_shape)
<commit_msg>Add an option to also return intermediate example norms<commit_after>import numpy as np
def hyperplanes(means, stds, n_planes):
if len(means) != len(stds):
raise ValueError('means and stds must have the same length')
n_features = len(means)
a = np.random.normal(means, stds, (n_planes, n_features))
b = np.random.normal(means, stds, (n_planes, n_features))
plane_vectors = a - b
return plane_vectors
def simhash(plane_vectors, data):
output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8)
flattened = data.reshape((len(data), -1))
x = np.dot(plane_vectors, flattened.T).T
output[np.where(x > 0)] = 1
return output
def example_wise_unit_norm(x, return_norms=False):
original_shape = x.shape
# flatten all dimensions of x, treating the first axis as examples and all
# other axes as features
x = x.reshape((len(x), -1))
norms = np.linalg.norm(x, axis=-1, keepdims=True)
normed = np.divide(x, norms, where=norms != 0)
normed = normed.reshape(original_shape)
if return_norms:
return normed, norms
else:
return normed
|
6068905219a04974f18033b3cf64b2a037f05d7b | opps/core/__init__.py | opps/core/__init__.py | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
| # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'redactor',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
| Add django contrib redirects on opps core init | Add django contrib redirects on opps core init
| Python | mit | opps/opps,opps/opps,williamroot/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
Add django contrib redirects on opps core init | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'redactor',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
| <commit_before># -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
<commit_msg>Add django contrib redirects on opps core init<commit_after> | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'redactor',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
| # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
Add django contrib redirects on opps core init# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'redactor',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
| <commit_before># -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
<commit_msg>Add django contrib redirects on opps core init<commit_after># -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
trans_app_label = _('Opps')
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'django.contrib.redirects',
'redactor',
'tagging',)
settings.MIDDLEWARE_CLASSES += (
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
|
cf49e996f07a2fd7107b953369fdccdc850d51d8 | test_tws/test_EReader.py | test_tws/test_EReader.py | '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| Create EReader object using EClientSocket.createReader() | Create EReader object using EClientSocket.createReader() | Python | bsd-3-clause | kbluck/pytws,kbluck/pytws | '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
Create EReader object using EClientSocket.createReader() | '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| <commit_before>'''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
<commit_msg>Create EReader object using EClientSocket.createReader()<commit_after> | '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
Create EReader object using EClientSocket.createReader()'''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| <commit_before>'''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
<commit_msg>Create EReader object using EClientSocket.createReader()<commit_after>'''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
|
28b570b98f802b5c15e88669776ed3a87b553dd7 | tests/functional/base.py | tests/functional/base.py | from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 5 seconds for an element to appear
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
| from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
    """Base class for functional tests driven through a PhantomJS browser."""
    def setUp(self):
        self.browser = webdriver.PhantomJS()
        # Wait up to 10 seconds for an element to appear; generous because
        # externally loaded scripts (e.g. sharethis) can be slow.
        self.browser.implicitly_wait(10)
    def tearDown(self):
        # Always release the browser process, even when a test fails.
        self.browser.quit()
| Increase implicitly wait limit to 10 seconds | Increase implicitly wait limit to 10 seconds
This is due to loading external scripts e.g. sharethis which take a
longer to load
| Python | bsd-3-clause | andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop | from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 5 seconds for an element to appear
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
Increase implicitly wait limit to 10 seconds
This is due to loading external scripts e.g. sharethis which take a
longer to load | from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 10 seconds for an element to appear
self.browser.implicitly_wait(10)
def tearDown(self):
self.browser.quit()
| <commit_before>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 5 seconds for an element to appear
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
<commit_msg>Increase implicitly wait limit to 10 seconds
This is due to loading external scripts e.g. sharethis which take a
longer to load<commit_after> | from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 10 seconds for an element to appear
self.browser.implicitly_wait(10)
def tearDown(self):
self.browser.quit()
| from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 5 seconds for an element to appear
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
Increase implicitly wait limit to 10 seconds
This is due to loading external scripts e.g. sharethis which take a
longer to loadfrom django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 10 seconds for an element to appear
self.browser.implicitly_wait(10)
def tearDown(self):
self.browser.quit()
| <commit_before>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 5 seconds for an element to appear
self.browser.implicitly_wait(5)
def tearDown(self):
self.browser.quit()
<commit_msg>Increase implicitly wait limit to 10 seconds
This is due to loading external scripts e.g. sharethis which take a
longer to load<commit_after>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
class FunctionalTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
# Wait up tp 10 seconds for an element to appear
self.browser.implicitly_wait(10)
def tearDown(self):
self.browser.quit()
|
540493a69ff2e9a5e6cc93a75b34af3c9f79b808 | plugins/generic/syntax.py | plugins/generic/syntax.py | #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
retVal = retVal.replace(item, escaper(item[1:-1]))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
| #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
    """
    Generic SQL-syntax helpers shared by the DBMS-specific plugins.
    """

    def __init__(self):
        pass

    @staticmethod
    def _escape(expression, quote=True, escaper=None):
        # With quote=True only the contents of single-quoted literals are
        # passed through `escaper`; with quote=False the whole expression is.
        if not quote:
            return escaper(expression)

        result = expression
        for token in re.findall(r"'[^']*'+", expression, re.S):
            inner = token[1:-1]
            # Empty literals ('') are left untouched.
            if inner:
                result = result.replace(token, escaper(inner))

        return result

    @staticmethod
    def escape(expression, quote=True):
        errMsg = "'escape' method must be defined "
        errMsg += "inside the specific DBMS plugin"
        raise SqlmapUndefinedMethod(errMsg)
| Fix for empty strings (previously '' was just removed) | Fix for empty strings (previously '' was just removed)
| Python | apache-2.0 | RexGene/monsu-server,RexGene/monsu-server,dtrip/.ubuntu,dtrip/.ubuntu | #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
retVal = retVal.replace(item, escaper(item[1:-1]))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
Fix for empty strings (previously '' was just removed) | #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
_ = item[1:-1]
if _:
retVal = retVal.replace(item, escaper(_))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
| <commit_before>#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
retVal = retVal.replace(item, escaper(item[1:-1]))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
<commit_msg>Fix for empty strings (previously '' was just removed)<commit_after> | #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
_ = item[1:-1]
if _:
retVal = retVal.replace(item, escaper(_))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
| #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
retVal = retVal.replace(item, escaper(item[1:-1]))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
Fix for empty strings (previously '' was just removed)#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
_ = item[1:-1]
if _:
retVal = retVal.replace(item, escaper(_))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
| <commit_before>#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
retVal = retVal.replace(item, escaper(item[1:-1]))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
<commit_msg>Fix for empty strings (previously '' was just removed)<commit_after>#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.exception import SqlmapUndefinedMethod
class Syntax:
"""
This class defines generic syntax functionalities for plugins.
"""
def __init__(self):
pass
@staticmethod
def _escape(expression, quote=True, escaper=None):
retVal = expression
if quote:
for item in re.findall(r"'[^']*'+", expression, re.S):
_ = item[1:-1]
if _:
retVal = retVal.replace(item, escaper(_))
else:
retVal = escaper(expression)
return retVal
@staticmethod
def escape(expression, quote=True):
errMsg = "'escape' method must be defined "
errMsg += "inside the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
|
22cfc216d3c0a11f5c90b27919fb0590cd3a210f | doc/render.py | doc/render.py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
return f.read()
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
| # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
    """Return the UTF-8 contents of *filename*, minus any copyright header.

    The Apache-style header template spans the first 16 lines of a source
    file, with the word "Copyright " opening its second line; when that
    pattern is detected the whole header is stripped before inclusion.
    """
    with codecs.open(filename, 'r', 'utf-8') as f:
        lines = f.readlines()
    # Strip copyright header, if it has one.  Guard against files shorter
    # than two lines, which previously raised IndexError on lines[1].
    if len(lines) > 1 and lines[1][0:10] == "Copyright ":
        lines = lines[16:]
    return "".join(lines)
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
| Make documentation generator strip copyright header from inline examples | Make documentation generator strip copyright header from inline examples
| Python | apache-2.0 | google/jsonnet,bowlofstew/jsonnet,davidzchen/jsonnet,Neeke/jsonnet,huggsboson/jsonnet,bowlofstew/jsonnet,sparkprime/jsonnet,google/jsonnet,huggsboson/jsonnet,Neeke/jsonnet,habibmasuro/jsonnet,lamuguo/jsonnet,bowlofstew/jsonnet,google/jsonnet,huggsboson/jsonnet,darioajr/jsonnet,sparkprime/jsonnet,bowlofstew/jsonnet,lamuguo/jsonnet,sparkprime/jsonnet,davidzchen/jsonnet,habibmasuro/jsonnet,Neeke/jsonnet,jbeda/jsonnet,lamuguo/jsonnet,tyler-elric/jsonnet,habibmasuro/jsonnet,tyler-elric/jsonnet,davidzchen/jsonnet,davidzchen/jsonnet,huggsboson/jsonnet,habibmasuro/jsonnet,tyler-elric/jsonnet,jbeda/jsonnet,Neeke/jsonnet,davidzchen/jsonnet,tyler-elric/jsonnet,bowlofstew/jsonnet,google/jsonnet,darioajr/jsonnet,google/jsonnet,Neeke/jsonnet,darioajr/jsonnet,darioajr/jsonnet,huggsboson/jsonnet,jbeda/jsonnet,tyler-elric/jsonnet,sparkprime/jsonnet,lamuguo/jsonnet,google/jsonnet,jbeda/jsonnet,jbeda/jsonnet,sparkprime/jsonnet,habibmasuro/jsonnet,sparkprime/jsonnet,lamuguo/jsonnet,darioajr/jsonnet | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
return f.read()
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
Make documentation generator strip copyright header from inline examples | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
lines = f.readlines()
# Strip copyright header, if it has one.
if lines[1][0:10] == "Copyright ":
lines = lines[16:]
return "".join(lines)
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
| <commit_before># Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
return f.read()
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
<commit_msg>Make documentation generator strip copyright header from inline examples<commit_after> | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
lines = f.readlines()
# Strip copyright header, if it has one.
if lines[1][0:10] == "Copyright ":
lines = lines[16:]
return "".join(lines)
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
| # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
return f.read()
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
Make documentation generator strip copyright header from inline examples# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
lines = f.readlines()
# Strip copyright header, if it has one.
if lines[1][0:10] == "Copyright ":
lines = lines[16:]
return "".join(lines)
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
| <commit_before># Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
return f.read()
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
<commit_msg>Make documentation generator strip copyright header from inline examples<commit_after># Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import sys
import jinja2
env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="."),
autoescape=True)
def myinclude(filename):
with codecs.open(filename, 'r', 'utf-8') as f:
lines = f.readlines()
# Strip copyright header, if it has one.
if lines[1][0:10] == "Copyright ":
lines = lines[16:]
return "".join(lines)
env.globals['myinclude'] = myinclude
if len(sys.argv) != 3:
sys.stderr.write("Usage: %s <filename.html.jinja> <output.html>\n" % sys.argv[0])
sys.exit(1)
html = env.get_template(sys.argv[1]).render()
myfile = codecs.open(sys.argv[2], 'w', 'utf-8')
myfile.write(html)
myfile.close()
|
0bdc48ce94a8c501dba1ce2925615714a46a1728 | pygameMidi_extended.py | pygameMidi_extended.py | #import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb) | #import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)
def set_instrument_bank(self, bank, channel):
assert (0 <= channel <= 15)
assert bank <= 127
self.write_short(0xB0 + channel, 0x00, bank) | Add method for instrument bank | Add method for instrument bank
| Python | bsd-3-clause | RenolY2/py-playBMS | #import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)Add method for instrument bank | #import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)
def set_instrument_bank(self, bank, channel):
assert (0 <= channel <= 15)
assert bank <= 127
self.write_short(0xB0 + channel, 0x00, bank) | <commit_before>#import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)<commit_msg>Add method for instrument bank<commit_after> | #import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)
def set_instrument_bank(self, bank, channel):
assert (0 <= channel <= 15)
assert bank <= 127
self.write_short(0xB0 + channel, 0x00, bank) | #import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)Add method for instrument bank#import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)
def set_instrument_bank(self, bank, channel):
assert (0 <= channel <= 15)
assert bank <= 127
self.write_short(0xB0 + channel, 0x00, bank) | <commit_before>#import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)<commit_msg>Add method for instrument bank<commit_after>#import pygame.midi.Output
from pygame.midi import Output
class Output(Output):#pygame.midi.Output):
def set_pan(self, pan, channel):
assert (0 <= channel <= 15)
assert pan <= 127
self.write_short(0xB0 + channel, 0x0A, pan)
def set_volume(self, volume, channel):
assert (0 <= channel <= 15)
assert volume <= 127
self.write_short(0xB0 + channel, 0x07, volume)
def set_pitch(self, pitch, channel):
assert (0 <= channel <= 15)
assert pitch <= (2**14-1)
# the 7 least significant bits come into the first data byte,
# the 7 most significant bits come into the second data byte
pitch_lsb = (pitch >> 7) & 127
pitch_msb = pitch & 127
self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)
def set_instrument_bank(self, bank, channel):
assert (0 <= channel <= 15)
assert bank <= 127
self.write_short(0xB0 + channel, 0x00, bank) |
54c4efdcbd87b32788760f1d40e6148ae264abdc | crawler/models.py | crawler/models.py | from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField()
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
| from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField(default=0)
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
| Make count of words default 0 | Make count of words default 0
| Python | mit | lucasgr7/silverplate,lucasgr7/silverplate,lucasgr7/silverplate | from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField()
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
Make count of words default 0 | from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField(default=0)
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
| <commit_before>from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField()
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
<commit_msg>Make count of words default 0<commit_after> | from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField(default=0)
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
| from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField()
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
Make count of words default 0from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField(default=0)
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
| <commit_before>from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField()
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
<commit_msg>Make count of words default 0<commit_after>from django.db import models
class DataIngredient(models.Model):
""""Class used to Store Ingredients of the recipes found in the crawling process"""
ingredient = models.CharField(max_length=1000)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Ingredientes')
def __str__(self):
return self.ingredient
class DataWayCooking(models.Model):
"""Class used to Store steps of the recipes found in the crawling process"""
description = models.CharField(max_length=500)
recipe = models.CharField(max_length=500)
group = models.CharField(max_length=500, default='Modo de Fazer')
def __str__(self):
return self.description
class IngredientSpec(models.Model):
"""Class used to manipulate Ingredients found and change data to data mining and found patterns of ingredients"""
word = models.CharField(max_length=500)
count = models.IntegerField(default=0)
type = models.CharField(max_length=1)
class IgnoredWords(models.Model):
"""Model to store words to ignore from Ingredient Spec"""
word = models.CharField(max_length=500)
|
cd7c9653944a2d90d64ef1b3847b9e63d0309321 | corehq/apps/importer/urls.py | corehq/apps/importer/urls.py | from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})$',
'importer_job_poll', name='importer_job_poll'),
)
| from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})/$',
'importer_job_poll', name='importer_job_poll'),
)
| Add trailing slash to url | Add trailing slash to url
| Python | bsd-3-clause | SEL-Columbia/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,gmimano/commcaretest,dimagi/commcare-hq | from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})$',
'importer_job_poll', name='importer_job_poll'),
)
Add trailing slash to url | from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})/$',
'importer_job_poll', name='importer_job_poll'),
)
| <commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})$',
'importer_job_poll', name='importer_job_poll'),
)
<commit_msg>Add trailing slash to url<commit_after> | from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})/$',
'importer_job_poll', name='importer_job_poll'),
)
| from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})$',
'importer_job_poll', name='importer_job_poll'),
)
Add trailing slash to urlfrom django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})/$',
'importer_job_poll', name='importer_job_poll'),
)
| <commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})$',
'importer_job_poll', name='importer_job_poll'),
)
<commit_msg>Add trailing slash to url<commit_after>from django.conf.urls.defaults import *
urlpatterns = patterns('corehq.apps.importer.views',
url(r'^excel/config/$', 'excel_config', name='excel_config'),
url(r'^excel/fields/$', 'excel_fields', name='excel_fields'),
url(r'^excel/commit/$', 'excel_commit', name='excel_commit'),
url(r'^importer_ajax/(?P<download_id>[0-9a-fA-Z]{25,32})/$',
'importer_job_poll', name='importer_job_poll'),
)
|
7f359fef5791aa8615c4298261c28e584f791c17 | dform/tests/urls.py | dform/tests/urls.py | from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'admin/', include(admin.site.urls)),
url(r'rankedmodel/', include('awl.rankedmodel.urls')),
url(r'dform/', include('dform.urls')),
url(r'dform_admin/', include('dform.admin_urls')),
]
| from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^rankedmodel/', include('awl.rankedmodel.urls')),
url(r'^dform/', include('dform.urls')),
url(r'^dform_admin/', include('dform.admin_urls')),
]
| Fix missing ^ in url patterns | Fix missing ^ in url patterns
| Python | mit | cltrudeau/django-dform,cltrudeau/django-dform,cltrudeau/django-dform | from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'admin/', include(admin.site.urls)),
url(r'rankedmodel/', include('awl.rankedmodel.urls')),
url(r'dform/', include('dform.urls')),
url(r'dform_admin/', include('dform.admin_urls')),
]
Fix missing ^ in url patterns | from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^rankedmodel/', include('awl.rankedmodel.urls')),
url(r'^dform/', include('dform.urls')),
url(r'^dform_admin/', include('dform.admin_urls')),
]
| <commit_before>from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'admin/', include(admin.site.urls)),
url(r'rankedmodel/', include('awl.rankedmodel.urls')),
url(r'dform/', include('dform.urls')),
url(r'dform_admin/', include('dform.admin_urls')),
]
<commit_msg>Fix missing ^ in url patterns<commit_after> | from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^rankedmodel/', include('awl.rankedmodel.urls')),
url(r'^dform/', include('dform.urls')),
url(r'^dform_admin/', include('dform.admin_urls')),
]
| from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'admin/', include(admin.site.urls)),
url(r'rankedmodel/', include('awl.rankedmodel.urls')),
url(r'dform/', include('dform.urls')),
url(r'dform_admin/', include('dform.admin_urls')),
]
Fix missing ^ in url patternsfrom django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^rankedmodel/', include('awl.rankedmodel.urls')),
url(r'^dform/', include('dform.urls')),
url(r'^dform_admin/', include('dform.admin_urls')),
]
| <commit_before>from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'admin/', include(admin.site.urls)),
url(r'rankedmodel/', include('awl.rankedmodel.urls')),
url(r'dform/', include('dform.urls')),
url(r'dform_admin/', include('dform.admin_urls')),
]
<commit_msg>Fix missing ^ in url patterns<commit_after>from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^rankedmodel/', include('awl.rankedmodel.urls')),
url(r'^dform/', include('dform.urls')),
url(r'^dform_admin/', include('dform.admin_urls')),
]
|
38cf3aed45ac604884d4ae1fed30714755f46cc8 | discussion/forms.py | discussion/forms.py | from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
| from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Reply to this conversation'}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Start a conversation'}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
| Set widgets for textareas so we can set placeholder attribute | Set widgets for textareas so we can set placeholder attribute
| Python | bsd-2-clause | lehins/lehins-discussion,incuna/django-discussion,incuna/django-discussion,lehins/lehins-discussion,lehins/lehins-discussion | from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
Set widgets for textareas so we can set placeholder attribute | from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Reply to this conversation'}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Start a conversation'}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
| <commit_before>from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
<commit_msg>Set widgets for textareas so we can set placeholder attribute<commit_after> | from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Reply to this conversation'}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Start a conversation'}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
| from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
Set widgets for textareas so we can set placeholder attributefrom django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Reply to this conversation'}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Start a conversation'}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
| <commit_before>from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
<commit_msg>Set widgets for textareas so we can set placeholder attribute<commit_after>from django import forms
from discussion.models import Comment, Post, Discussion
class CommentForm(forms.ModelForm):
class Meta:
exclude = ('user', 'post')
model = Comment
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Reply to this conversation'}),
}
class PostForm(forms.ModelForm):
class Meta:
exclude = ('user', 'discussion')
model = Post
widgets = {
'body' : forms.Textarea(attrs={'placeholder' : 'Start a conversation'}),
}
class SearchForm(forms.Form):
search = forms.CharField()
discussion = forms.ModelChoiceField(required=False, queryset=Discussion.objects, empty_label='All discussions')
#class Meta:
# exclude = ('discussion', 'slug')
# model = Post
|
61679e3faf44bc1d54388f617554f03809b2eead | gpytorch/kernels/periodic_kernel.py | gpytorch/kernels/periodic_kernel.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
| Fix dimensions of periodic kernel parameters | Fix dimensions of periodic kernel parameters
| Python | mit | jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
Fix dimensions of periodic kernel parameters | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
<commit_msg>Fix dimensions of periodic kernel parameters<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
Fix dimensions of periodic kernel parametersfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp().unsqueeze(1)
<commit_msg>Fix dimensions of periodic kernel parameters<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import torch
from torch import nn
from .kernel import Kernel
class PeriodicKernel(Kernel):
def __init__(
self,
log_lengthscale_bounds=(-10000, 10000),
log_period_length_bounds=(-10000, 10000),
eps=1e-5,
active_dims=None,
):
super(PeriodicKernel, self).__init__(
has_lengthscale=True,
log_lengthscale_bounds=log_lengthscale_bounds,
active_dims=active_dims,
)
self.eps = eps
self.register_parameter(
'log_period_length',
nn.Parameter(torch.zeros(1, 1, 1)),
bounds=log_period_length_bounds,
)
def forward(self, x1, x2):
lengthscale = (self.log_lengthscale.exp() + self.eps).sqrt_()
period_length = (self.log_period_length.exp() + self.eps).sqrt_()
diff = torch.sum((x1.unsqueeze(2) - x2.unsqueeze(1)).abs(), -1)
res = - 2 * torch.sin(math.pi * diff / period_length).pow(2) / lengthscale
return res.exp()
|
d2d81cfe441171085f954c30eec718a0220ac286 | hoomd/md/pytest/test_table_pressure.py | hoomd/md/pytest/test_table_pressure.py | import hoomd
import io
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0001)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo)
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
| import hoomd
import io
import numpy
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo, quantities=['pressure'])
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
output_lines = output.getvalue().split('\n')
ideal_gas_pressure = (2 * thermo.translational_kinetic_energy / 3
/ sim.state.box.volume)
numpy.testing.assert_allclose(float(output_lines[1]),
ideal_gas_pressure,
rtol=0.2)
| Add output check on the pressure quantity | Add output check on the pressure quantity
| Python | bsd-3-clause | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | import hoomd
import io
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0001)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo)
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
Add output check on the pressure quantity | import hoomd
import io
import numpy
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo, quantities=['pressure'])
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
output_lines = output.getvalue().split('\n')
ideal_gas_pressure = (2 * thermo.translational_kinetic_energy / 3
/ sim.state.box.volume)
numpy.testing.assert_allclose(float(output_lines[1]),
ideal_gas_pressure,
rtol=0.2)
| <commit_before>import hoomd
import io
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0001)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo)
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
<commit_msg>Add output check on the pressure quantity<commit_after> | import hoomd
import io
import numpy
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo, quantities=['pressure'])
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
output_lines = output.getvalue().split('\n')
ideal_gas_pressure = (2 * thermo.translational_kinetic_energy / 3
/ sim.state.box.volume)
numpy.testing.assert_allclose(float(output_lines[1]),
ideal_gas_pressure,
rtol=0.2)
| import hoomd
import io
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0001)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo)
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
Add output check on the pressure quantityimport hoomd
import io
import numpy
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo, quantities=['pressure'])
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
output_lines = output.getvalue().split('\n')
ideal_gas_pressure = (2 * thermo.translational_kinetic_energy / 3
/ sim.state.box.volume)
numpy.testing.assert_allclose(float(output_lines[1]),
ideal_gas_pressure,
rtol=0.2)
| <commit_before>import hoomd
import io
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0001)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo)
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
<commit_msg>Add output check on the pressure quantity<commit_after>import hoomd
import io
import numpy
def test_table_pressure(simulation_factory, two_particle_snapshot_factory):
"""Test that write.table can log MD pressure values."""
thermo = hoomd.md.compute.ThermodynamicQuantities(hoomd.filter.All())
snap = two_particle_snapshot_factory()
if snap.communicator.rank == 0:
snap.particles.velocity[:] = [[-2, 0, 0], [2, 0, 0]]
sim = simulation_factory(snap)
sim.operations.add(thermo)
integrator = hoomd.md.Integrator(dt=0.0)
integrator.methods.append(
hoomd.md.methods.NVT(hoomd.filter.All(), tau=1, kT=1))
sim.operations.integrator = integrator
logger = hoomd.logging.Logger(categories=['scalar'])
logger.add(thermo, quantities=['pressure'])
output = io.StringIO("")
table_writer = hoomd.write.Table(1, logger, output)
sim.operations.writers.append(table_writer)
sim.run(1)
output_lines = output.getvalue().split('\n')
ideal_gas_pressure = (2 * thermo.translational_kinetic_energy / 3
/ sim.state.box.volume)
numpy.testing.assert_allclose(float(output_lines[1]),
ideal_gas_pressure,
rtol=0.2)
|
b690b87094b4205b448ba1ea5dda546c3e7a976d | python/xi_plugin/style.py | python/xi_plugin/style.py | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgb_float(red, green, blue):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue))
return (0xFF << 24) | (red << 16) | (green << 8) | blue
| # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgba_float(red, green, blue, alpha=1):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha))
return (alpha << 24) | (red << 16) | (green << 8) | blue
| Support setting alpha in colors | Support setting alpha in colors
| Python | apache-2.0 | google/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor,google/xi-editor,modelorganism/xi-editor,fuchsia-mirror/third_party-xi-editor,google/xi-editor,fuchsia-mirror/third_party-xi-editor,google/xi-editor | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgb_float(red, green, blue):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue))
return (0xFF << 24) | (red << 16) | (green << 8) | blue
Support setting alpha in colors | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgba_float(red, green, blue, alpha=1):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha))
return (alpha << 24) | (red << 16) | (green << 8) | blue
| <commit_before># Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgb_float(red, green, blue):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue))
return (0xFF << 24) | (red << 16) | (green << 8) | blue
<commit_msg>Support setting alpha in colors<commit_after> | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgba_float(red, green, blue, alpha=1):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha))
return (alpha << 24) | (red << 16) | (green << 8) | blue
| # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgb_float(red, green, blue):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue))
return (0xFF << 24) | (red << 16) | (green << 8) | blue
Support setting alpha in colors# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgba_float(red, green, blue, alpha=1):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha))
return (alpha << 24) | (red << 16) | (green << 8) | blue
| <commit_before># Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgb_float(red, green, blue):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue = map(lambda c: int(0xFF * c), (red, green, blue))
return (0xFF << 24) | (red << 16) | (green << 8) | blue
<commit_msg>Support setting alpha in colors<commit_after># Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides convenience methods for styling text."""
BOLD = 1
UNDERLINE = 2
ITALIC = 4
def color_for_rgba_float(red, green, blue, alpha=1):
if any(map(lambda x: x < 0 or x > 1, (red, green, blue, alpha))):
raise ValueError("Values must be in the range 0..1 (inclusive)")
red, green, blue, alpha = map(lambda c: int(0xFF * c), (red, green, blue, alpha))
return (alpha << 24) | (red << 16) | (green << 8) | blue
|
8dc1bab80e52442999eb59e096abd5848c4e8d66 | unicornclient/routine.py | unicornclient/routine.py | import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
return
self.process(data)
if got_task:
self.queue.task_done()
def process(self, data):
pass
| import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
self.is_stopping = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
self.is_stopping = True
self.process(data)
if got_task:
self.queue.task_done()
if self.is_stopping:
break
def process(self, data):
pass
| Allow one last call to process before stopping | Allow one last call to process before stopping
| Python | mit | amm0nite/unicornclient,amm0nite/unicornclient | import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
return
self.process(data)
if got_task:
self.queue.task_done()
def process(self, data):
pass
Allow one last call to process before stopping | import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
self.is_stopping = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
self.is_stopping = True
self.process(data)
if got_task:
self.queue.task_done()
if self.is_stopping:
break
def process(self, data):
pass
| <commit_before>import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
return
self.process(data)
if got_task:
self.queue.task_done()
def process(self, data):
pass
<commit_msg>Allow one last call to process before stopping<commit_after> | import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
self.is_stopping = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
self.is_stopping = True
self.process(data)
if got_task:
self.queue.task_done()
if self.is_stopping:
break
def process(self, data):
pass
| import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
return
self.process(data)
if got_task:
self.queue.task_done()
def process(self, data):
pass
Allow one last call to process before stoppingimport threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
self.is_stopping = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
self.is_stopping = True
self.process(data)
if got_task:
self.queue.task_done()
if self.is_stopping:
break
def process(self, data):
pass
| <commit_before>import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
return
self.process(data)
if got_task:
self.queue.task_done()
def process(self, data):
pass
<commit_msg>Allow one last call to process before stopping<commit_after>import threading
import queue
class Routine(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.queue = queue.Queue()
self.manager = None
self.no_wait = False
self.is_stopping = False
def run(self):
while True:
got_task = False
data = None
if self.no_wait:
try:
data = self.queue.get_nowait()
got_task = True
except queue.Empty:
data = None
got_task = False
else:
data = self.queue.get()
got_task = True
if data:
index = 'routine_command'
routine_command = data[index] if index in data else None
if routine_command == 'stop':
self.is_stopping = True
self.process(data)
if got_task:
self.queue.task_done()
if self.is_stopping:
break
def process(self, data):
pass
|
b1685dc4a0a2036378d47f07d7315e5b1935a4ad | hyrodactil/tests/openings/models.py | hyrodactil/tests/openings/models.py | from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220", initial=True,
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
| from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220",
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
| Fix tests for positioned stages | openings: Fix tests for positioned stages
| Python | mit | hizardapp/Hizard,hizardapp/Hizard,hizardapp/Hizard | from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220", initial=True,
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
openings: Fix tests for positioned stages | from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220",
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
| <commit_before>from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220", initial=True,
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
<commit_msg>openings: Fix tests for positioned stages<commit_after> | from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220",
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
| from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220", initial=True,
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
openings: Fix tests for positioned stagesfrom django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220",
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
| <commit_before>from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220", initial=True,
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
<commit_msg>openings: Fix tests for positioned stages<commit_after>from django.test import TestCase
from ..factories._applications import ApplicationFactory
from ..factories._companysettings import InterviewStageFactory
from ..factories._openings import OpeningFactory
class OpeningsModelsTests(TestCase):
def test_applicants_stats(self):
opening = OpeningFactory()
self.assertEqual(opening.applicants_stats(), [])
s1 = InterviewStageFactory(name="L220",
company=opening.company)
s2 = InterviewStageFactory(name="L33",
company=opening.company)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 0], [s2.name, 0]])
application = ApplicationFactory.create(opening=opening)
application.stage_transitions.create(stage=s1)
self.assertEqual(opening.applicants_stats(),
[[s1.name, 1], [s2.name, 0]])
|
6826f4ecef0573f22430e5ed040618f8ded4d73f | dmagellan/launch.py | dmagellan/launch.py | # run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
| # run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.854e51d.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
| Use new dmagellan worker tarball (854e51d) | Use new dmagellan worker tarball (854e51d)
| Python | apache-2.0 | matyasselmeci/dask_condor,matyasselmeci/dask_condor | # run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
Use new dmagellan worker tarball (854e51d) | # run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.854e51d.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
| <commit_before># run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
<commit_msg>Use new dmagellan worker tarball (854e51d)<commit_after> | # run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.854e51d.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
| # run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
Use new dmagellan worker tarball (854e51d)# run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.854e51d.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
| <commit_before># run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
<commit_msg>Use new dmagellan worker tarball (854e51d)<commit_after># run this with python -i
import dask, distributed
from distributed import Client
from distributed.deploy.adaptive import Adaptive
from dask_condor import HTCondorCluster
import logging, os
logging.basicConfig(level=0)
logging.getLogger("distributed.comm.tcp").setLevel(logging.ERROR)
logging.getLogger("distributed.deploy.adaptive").setLevel(logging.WARNING)
worker_tarball="dask_condor_worker_dmagellan.854e51d.SL6.tar.gz"
if os.path.exists(os.path.join('/squid/matyas', worker_tarball)):
worker_tarball = "http://proxy.chtc.wisc.edu/SQUID/matyas/" + worker_tarball
elif not os.path.exists(worker_tarball):
worker_tarball = "http://research.cs.wisc.edu/~matyas/dask_condor/" + worker_tarball
htc = HTCondorCluster(memory_per_worker=4096, update_interval=10000, worker_tarball=worker_tarball, logdir=".log")
cli = Client(htc)
sch = htc.scheduler
print("htc={0}\ncli={1}\nsch={2}".format(htc,cli,sch))
|
c579ff6d920922d317d2d11aa7ca7cefdefafaec | python_scripts/mc_solr.py | python_scripts/mc_solr.py | import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location + '/solr/collection1'
def solr_request( path, params):
url = get_solr_collection_url_prefix() + '/' + path
params['wt'] = 'json'
r = requests.get( url, params, headers = { 'Accept': 'application/json'})
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
| import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location() + '/solr/collection1'
def solr_request( path, params):
ipdb.set_trace()
url = get_solr_collection_url_prefix() + '/' + path
print 'url: {}'.format( url )
params['wt'] = 'json'
r = requests.get( url, params=params, headers = { 'Accept': 'application/json'})
print 'request url '
print r.url
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
print "starting"
print dataimport_full_import()
ipdb.set_trace()
print "exiting"
| Fix URL typo full_import ==> full-import | Fix URL typo full_import ==> full-import
| Python | agpl-3.0 | AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud | import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location + '/solr/collection1'
def solr_request( path, params):
url = get_solr_collection_url_prefix() + '/' + path
params['wt'] = 'json'
r = requests.get( url, params, headers = { 'Accept': 'application/json'})
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
Fix URL typo full_import ==> full-import | import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location() + '/solr/collection1'
def solr_request( path, params):
ipdb.set_trace()
url = get_solr_collection_url_prefix() + '/' + path
print 'url: {}'.format( url )
params['wt'] = 'json'
r = requests.get( url, params=params, headers = { 'Accept': 'application/json'})
print 'request url '
print r.url
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
print "starting"
print dataimport_full_import()
ipdb.set_trace()
print "exiting"
| <commit_before>import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location + '/solr/collection1'
def solr_request( path, params):
url = get_solr_collection_url_prefix() + '/' + path
params['wt'] = 'json'
r = requests.get( url, params, headers = { 'Accept': 'application/json'})
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
<commit_msg>Fix URL typo full_import ==> full-import<commit_after> | import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location() + '/solr/collection1'
def solr_request( path, params):
ipdb.set_trace()
url = get_solr_collection_url_prefix() + '/' + path
print 'url: {}'.format( url )
params['wt'] = 'json'
r = requests.get( url, params=params, headers = { 'Accept': 'application/json'})
print 'request url '
print r.url
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
print "starting"
print dataimport_full_import()
ipdb.set_trace()
print "exiting"
| import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location + '/solr/collection1'
def solr_request( path, params):
url = get_solr_collection_url_prefix() + '/' + path
params['wt'] = 'json'
r = requests.get( url, params, headers = { 'Accept': 'application/json'})
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
Fix URL typo full_import ==> full-importimport requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location() + '/solr/collection1'
def solr_request( path, params):
ipdb.set_trace()
url = get_solr_collection_url_prefix() + '/' + path
print 'url: {}'.format( url )
params['wt'] = 'json'
r = requests.get( url, params=params, headers = { 'Accept': 'application/json'})
print 'request url '
print r.url
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
print "starting"
print dataimport_full_import()
ipdb.set_trace()
print "exiting"
| <commit_before>import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location + '/solr/collection1'
def solr_request( path, params):
url = get_solr_collection_url_prefix() + '/' + path
params['wt'] = 'json'
r = requests.get( url, params, headers = { 'Accept': 'application/json'})
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full_import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
<commit_msg>Fix URL typo full_import ==> full-import<commit_after>import requests
import ipdb
import mc_config
import psycopg2
import psycopg2.extras
import time
def get_solr_location():
##TODO -- get this from the yaml file
return 'http://localhost:8983'
def get_solr_collection_url_prefix():
return get_solr_location() + '/solr/collection1'
def solr_request( path, params):
ipdb.set_trace()
url = get_solr_collection_url_prefix() + '/' + path
print 'url: {}'.format( url )
params['wt'] = 'json'
r = requests.get( url, params=params, headers = { 'Accept': 'application/json'})
print 'request url '
print r.url
data = r.json()
return data
def dataimport_command( command, params={}):
params['command'] = command
return solr_request( 'dataimport', params )
def dataimport_status():
return dataimport_command( 'status' )
def dataimport_delta_import():
params = {
'commit': 'true',
'clean': 'false',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_full_import():
params = {
'commit': 'true',
'clean': 'true',
}
##Note: We're using the delta import through full import approach
return dataimport_command( 'full-import', params )
def dataimport_reload_config():
return dataimport_command( 'reload' )
print "starting"
print dataimport_full_import()
ipdb.set_trace()
print "exiting"
|
8c78095c75c5862de12d33e94610a35bfbacf2bf | react_router/templates.py | react_router/templates.py | MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.React, {var}.routes, {var}.router, '{container_id}');
"""
| MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.ReactDOM, {var}.routes, {var}.router, '{container_id}');
"""
| Update template to use new react-dom | Update template to use new react-dom
In React 0.14, the DOM manipulation algorithms have been pulled out into
a new package called react-dom. We update our template to use this new
package.
| Python | mit | HorizonXP/python-react-router,HorizonXP/python-react-router,HorizonXP/python-react-router | MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.React, {var}.routes, {var}.router, '{container_id}');
"""
Update template to use new react-dom
In React 0.14, the DOM manipulation algorithms have been pulled out into
a new package called react-dom. We update our template to use this new
package. | MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.ReactDOM, {var}.routes, {var}.router, '{container_id}');
"""
| <commit_before>MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.React, {var}.routes, {var}.router, '{container_id}');
"""
<commit_msg>Update template to use new react-dom
In React 0.14, the DOM manipulation algorithms have been pulled out into
a new package called react-dom. We update our template to use this new
package.<commit_after> | MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.ReactDOM, {var}.routes, {var}.router, '{container_id}');
"""
| MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.React, {var}.routes, {var}.router, '{container_id}');
"""
Update template to use new react-dom
In React 0.14, the DOM manipulation algorithms have been pulled out into
a new package called react-dom. We update our template to use this new
package.MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.ReactDOM, {var}.routes, {var}.router, '{container_id}');
"""
| <commit_before>MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.React, {var}.routes, {var}.router, '{container_id}');
"""
<commit_msg>Update template to use new react-dom
In React 0.14, the DOM manipulation algorithms have been pulled out into
a new package called react-dom. We update our template to use this new
package.<commit_after>MOUNT_JS = \
"""
if (typeof {var}.React === 'undefined') throw new Error('Cannot find `React` variable. Have you added an object to your JS export which points to React?');
if (typeof {var}.router === 'undefined') throw new Error('Cannot find `router` variable. Have you added an object to your JS export which points to a function that returns a react-router.Router?');
if (typeof {var} === 'undefined') throw new Error('Cannot find component variable `{var}`');
(function(React, routes, router, containerId) {{
var props = {props};
var element = router(routes, props);
var container = document.getElementById(containerId);
if (!container) throw new Error('Cannot find the container element `#{container_id}` for component `{var}`');
React.render(element, container);
}})({var}.ReactDOM, {var}.routes, {var}.router, '{container_id}');
"""
|
ac27cb2348748a774ab2ae14ade2c49de94c2b4f | frigg/worker/cli.py | frigg/worker/cli.py | # -*- coding: utf8 -*-
from fabric import colors
from frigg.worker.fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main() | # -*- coding: utf8 -*-
import os
import logging.config
from fabric import colors
from .fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
try:
logging.config.fileConfig(os.path.expanduser('~/.frigg/logging.conf'))
except Exception, e:
print("There is a problem with the logging config:\n%s" % e)
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main() | Add loading of logging config | Add loading of logging config
| Python | mit | frigg/frigg-worker | # -*- coding: utf8 -*-
from fabric import colors
from frigg.worker.fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main()Add loading of logging config | # -*- coding: utf8 -*-
import os
import logging.config
from fabric import colors
from .fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
try:
logging.config.fileConfig(os.path.expanduser('~/.frigg/logging.conf'))
except Exception, e:
print("There is a problem with the logging config:\n%s" % e)
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main() | <commit_before># -*- coding: utf8 -*-
from fabric import colors
from frigg.worker.fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main()<commit_msg>Add loading of logging config<commit_after> | # -*- coding: utf8 -*-
import os
import logging.config
from fabric import colors
from .fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
try:
logging.config.fileConfig(os.path.expanduser('~/.frigg/logging.conf'))
except Exception, e:
print("There is a problem with the logging config:\n%s" % e)
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main() | # -*- coding: utf8 -*-
from fabric import colors
from frigg.worker.fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main()Add loading of logging config# -*- coding: utf8 -*-
import os
import logging.config
from fabric import colors
from .fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
try:
logging.config.fileConfig(os.path.expanduser('~/.frigg/logging.conf'))
except Exception, e:
print("There is a problem with the logging config:\n%s" % e)
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main() | <commit_before># -*- coding: utf8 -*-
from fabric import colors
from frigg.worker.fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main()<commit_msg>Add loading of logging config<commit_after># -*- coding: utf8 -*-
import os
import logging.config
from fabric import colors
from .fetcher import fetcher
class Commands(object):
@staticmethod
def start():
print(colors.green("Starting frigg worker"))
fetcher()
@staticmethod
def unknown_command():
print(colors.red("Unknown command"))
def main():
import argparse
try:
logging.config.fileConfig(os.path.expanduser('~/.frigg/logging.conf'))
except Exception, e:
print("There is a problem with the logging config:\n%s" % e)
parser = argparse.ArgumentParser(description='Do some work for frigg.')
parser.add_argument('command')
args = parser.parse_args()
getattr(Commands, args.command, Commands.unknown_command)()
if __name__ == '__main__':
main() |
a4fcd6c4f628de22064ea054bac5603838b35459 | councilmatic_core/migrations/0041_event_extras.py | councilmatic_core/migrations/0041_event_extras.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = jsonb_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = json_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
| Use json_build_object, rather than jsonb_build_object | Use json_build_object, rather than jsonb_build_object
| Python | mit | datamade/django-councilmatic,datamade/django-councilmatic,datamade/django-councilmatic,datamade/django-councilmatic | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = jsonb_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
Use json_build_object, rather than jsonb_build_object | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = json_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = jsonb_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
<commit_msg>Use json_build_object, rather than jsonb_build_object<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = json_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = jsonb_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
Use json_build_object, rather than jsonb_build_object# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = json_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = jsonb_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
<commit_msg>Use json_build_object, rather than jsonb_build_object<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-05-07 16:20
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('councilmatic_core', '0040_mediaevent_meta'),
]
operations = [
migrations.AddField(
model_name='event',
name='extras',
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.RunSQL('''
UPDATE councilmatic_core_event
SET extras = json_build_object('guid', guid)
WHERE guid IS NOT NULL
''', reverse_sql='''
UPDATE councilmatic_core_event
SET guid = extras->'guid'
WHERE extras->'guid' IS NOT NULL
'''),
migrations.RemoveField(
model_name='event',
name='guid',
),
]
|
98e5a1fe20e6eefa108ca3e5323da1bf3ad65be9 | corehq/apps/hqadmin/management/commands/shutdown_celery_worker_by_hostname.py | corehq/apps/hqadmin/management/commands/shutdown_celery_worker_by_hostname.py | from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
| from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
from corehq.apps.hqadmin.utils import parse_celery_pings
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
worker_responses = celery.control.ping(timeout=10, destination=[hostname])
pings = parse_celery_pings(worker_responses)
if hostname in pings:
print 'Did not shutdown worker'
exit(1)
print 'Successfully initiated warm shutdown'
| Exit 1 based on whether or not it was shutdown correctly | Exit 1 based on whether or not it was shutdown correctly
| Python | bsd-3-clause | dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
Exit 1 based on whether or not it was shutdown correctly | from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
from corehq.apps.hqadmin.utils import parse_celery_pings
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
worker_responses = celery.control.ping(timeout=10, destination=[hostname])
pings = parse_celery_pings(worker_responses)
if hostname in pings:
print 'Did not shutdown worker'
exit(1)
print 'Successfully initiated warm shutdown'
| <commit_before>from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
<commit_msg>Exit 1 based on whether or not it was shutdown correctly<commit_after> | from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
from corehq.apps.hqadmin.utils import parse_celery_pings
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
worker_responses = celery.control.ping(timeout=10, destination=[hostname])
pings = parse_celery_pings(worker_responses)
if hostname in pings:
print 'Did not shutdown worker'
exit(1)
print 'Successfully initiated warm shutdown'
| from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
Exit 1 based on whether or not it was shutdown correctlyfrom django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
from corehq.apps.hqadmin.utils import parse_celery_pings
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
worker_responses = celery.control.ping(timeout=10, destination=[hostname])
pings = parse_celery_pings(worker_responses)
if hostname in pings:
print 'Did not shutdown worker'
exit(1)
print 'Successfully initiated warm shutdown'
| <commit_before>from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
<commit_msg>Exit 1 based on whether or not it was shutdown correctly<commit_after>from django.core.management.base import BaseCommand
from django.conf import settings
from celery import Celery
from corehq.apps.hqadmin.utils import parse_celery_pings
class Command(BaseCommand):
help = "Gracefully shutsdown a celery worker"
args = 'hostname'
def handle(self, hostname, *args, **options):
celery = Celery()
celery.config_from_object(settings)
celery.control.broadcast('shutdown', destination=[hostname])
worker_responses = celery.control.ping(timeout=10, destination=[hostname])
pings = parse_celery_pings(worker_responses)
if hostname in pings:
print 'Did not shutdown worker'
exit(1)
print 'Successfully initiated warm shutdown'
|
2d1488669721a46350b5c0f06a049f5d4816f931 | sauna/plugins/ext/disk.py | sauna/plugins/ext/disk.py | from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
'''
| import os
from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@my_plugin.check()
def used_inodes_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
s = os.statvfs(part.mountpoint)
try:
inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files)
except ZeroDivisionError:
continue
status = self._value_to_status_less(
inodes_usage, check_config, self._strip_percent_sign
)
if status != self.STATUS_OK:
return (
status,
'Partition {} uses {}% of inodes'.format(part.mountpoint,
inodes_usage)
)
return self.STATUS_OK, 'Inodes usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
- type: used_inodes_percent
warn: 80%
crit: 90%
'''
| Create Disk check to monitor inodes | Create Disk check to monitor inodes
| Python | bsd-2-clause | bewiwi/sauna,NicolasLM/sauna,NicolasLM/sauna,bewiwi/sauna | from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
'''
Create Disk check to monitor inodes | import os
from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@my_plugin.check()
def used_inodes_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
s = os.statvfs(part.mountpoint)
try:
inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files)
except ZeroDivisionError:
continue
status = self._value_to_status_less(
inodes_usage, check_config, self._strip_percent_sign
)
if status != self.STATUS_OK:
return (
status,
'Partition {} uses {}% of inodes'.format(part.mountpoint,
inodes_usage)
)
return self.STATUS_OK, 'Inodes usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
- type: used_inodes_percent
warn: 80%
crit: 90%
'''
| <commit_before>from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
'''
<commit_msg>Create Disk check to monitor inodes<commit_after> | import os
from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@my_plugin.check()
def used_inodes_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
s = os.statvfs(part.mountpoint)
try:
inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files)
except ZeroDivisionError:
continue
status = self._value_to_status_less(
inodes_usage, check_config, self._strip_percent_sign
)
if status != self.STATUS_OK:
return (
status,
'Partition {} uses {}% of inodes'.format(part.mountpoint,
inodes_usage)
)
return self.STATUS_OK, 'Inodes usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
- type: used_inodes_percent
warn: 80%
crit: 90%
'''
| from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
'''
Create Disk check to monitor inodesimport os
from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@my_plugin.check()
def used_inodes_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
s = os.statvfs(part.mountpoint)
try:
inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files)
except ZeroDivisionError:
continue
status = self._value_to_status_less(
inodes_usage, check_config, self._strip_percent_sign
)
if status != self.STATUS_OK:
return (
status,
'Partition {} uses {}% of inodes'.format(part.mountpoint,
inodes_usage)
)
return self.STATUS_OK, 'Inodes usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
- type: used_inodes_percent
warn: 80%
crit: 90%
'''
| <commit_before>from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
'''
<commit_msg>Create Disk check to monitor inodes<commit_after>import os
from sauna.plugins import PluginRegister
from sauna.plugins.base import PsutilPlugin
my_plugin = PluginRegister('Disk')
@my_plugin.plugin()
class Disk(PsutilPlugin):
@my_plugin.check()
def used_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
part_usage = self.psutil.disk_usage(part.mountpoint).percent
status = self._value_to_status_less(part_usage, check_config)
if status > 0:
return (
status,
'Partition {} is full at {}%'.format(part.mountpoint,
part_usage)
)
return 0, 'Disk usage correct'
@my_plugin.check()
def used_inodes_percent(self, check_config):
check_config = self._strip_percent_sign_from_check_config(check_config)
for part in self.psutil.disk_partitions(all=False):
s = os.statvfs(part.mountpoint)
try:
inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files)
except ZeroDivisionError:
continue
status = self._value_to_status_less(
inodes_usage, check_config, self._strip_percent_sign
)
if status != self.STATUS_OK:
return (
status,
'Partition {} uses {}% of inodes'.format(part.mountpoint,
inodes_usage)
)
return self.STATUS_OK, 'Inodes usage correct'
@staticmethod
def config_sample():
return '''
# Usage of disks
Disk:
checks:
- type: used_percent
warn: 80%
crit: 90%
- type: used_inodes_percent
warn: 80%
crit: 90%
'''
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.