| Column | Values |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 0–2.94k |
| new_contents | stringlengths 1–4.43k |
| subject | stringlengths 15–444 |
| message | stringlengths 16–3.45k |
| lang | stringclasses 1 value |
| license | stringclasses 13 values |
| repos | stringlengths 5–43.2k |
| prompt | stringlengths 17–4.58k |
| response | stringlengths 1–4.43k |
| prompt_tagged | stringlengths 58–4.62k |
| response_tagged | stringlengths 1–4.43k |
| text | stringlengths 132–7.29k |
| text_tagged | stringlengths 173–7.33k |
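The sample rows below suggest how the derived columns relate to the raw ones: `prompt` is `old_contents` followed by the commit `subject`, `response` is `new_contents`, the `*_tagged` variants wrap the same pieces in `<commit_before>`, `<commit_msg>` and `<commit_after>` markers, and `text`/`text_tagged` concatenate the respective prompt and response. A minimal sketch of that assembly, inferred from the visible rows rather than from any official dataset description (the helper name `build_row_texts` is ours):

```python
# Hypothetical helper: reconstructs the derived columns the way the sample
# rows below appear to do. The field relationships are inferred from the
# data, not taken from an official schema.
def build_row_texts(old_contents: str, subject: str, new_contents: str) -> dict:
    prompt = old_contents + subject  # raw prompt: old code followed by commit subject
    prompt_tagged = (
        "<commit_before>" + old_contents
        + "<commit_msg>" + subject
        + "<commit_after>"
    )
    return {
        "prompt": prompt,
        "response": new_contents,
        "text": prompt + new_contents,            # prompt and response concatenated
        "prompt_tagged": prompt_tagged,
        "response_tagged": new_contents,
        "text_tagged": prompt_tagged + new_contents,
    }
```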
d4f21288e7ba6bdc27f0f01fd0dba394a9786aa6
|
open_humans/utilities.py
|
open_humans/utilities.py
|
import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
env = '[root]\n' + io.open('.env', 'r').read()
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
|
import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
try:
env = '[root]\n' + io.open('.env', 'r').read()
except IOError:
return
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
|
Fix crash if .env does not exist
|
Fix crash if .env does not exist
|
Python
|
mit
|
PersonalGenomesOrg/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans
|
import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
env = '[root]\n' + io.open('.env', 'r').read()
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
Fix crash if .env does not exist
|
import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
try:
env = '[root]\n' + io.open('.env', 'r').read()
except IOError:
return
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
|
<commit_before>import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
env = '[root]\n' + io.open('.env', 'r').read()
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
<commit_msg>Fix crash if .env does not exist<commit_after>
|
import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
try:
env = '[root]\n' + io.open('.env', 'r').read()
except IOError:
return
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
|
import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
env = '[root]\n' + io.open('.env', 'r').read()
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
Fix crash if .env does not existimport io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
try:
env = '[root]\n' + io.open('.env', 'r').read()
except IOError:
return
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
|
<commit_before>import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
env = '[root]\n' + io.open('.env', 'r').read()
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
<commit_msg>Fix crash if .env does not exist<commit_after>import io
import os
from ConfigParser import RawConfigParser
def apply_env():
"""
Read the `.env` file and apply it to os.environ just like using `foreman
run` would.
"""
try:
env = '[root]\n' + io.open('.env', 'r').read()
except IOError:
return
config = RawConfigParser(allow_no_value=True)
# Use `str` instead of the regular option transform to preserve option case
config.optionxform = str
config.readfp(io.StringIO(env), '.env')
os.environ.update(config.items('root'))
|
95bade35933956ea22fcec0313e14cd8ceb75656
|
portal_sale_distributor/models/sale_order.py
|
portal_sale_distributor/models/sale_order.py
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.sudo().message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
|
Use sudo to prevent errors with signup_get_auth_param.
|
[FIX] Use sudo to prevent errors with signup_get_auth_param.
|
Python
|
agpl-3.0
|
ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
[FIX] Use sudo to prevent errors with signup_get_auth_param.
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.sudo().message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
|
<commit_before>##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
<commit_msg>[FIX] Use sudo to prevent errors with signup_get_auth_param.<commit_after>
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.sudo().message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
|
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
[FIX] Use sudo to prevent errors with signup_get_auth_param.##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.sudo().message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
|
<commit_before>##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
<commit_msg>[FIX] Use sudo to prevent errors with signup_get_auth_param.<commit_after>##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, fields, api, _
class SaleOrder(models.Model):
_inherit = 'sale.order'
activity_date_deadline = fields.Date(
groups="base.group_user,"
"portal_sale_distributor.group_portal_distributor"
)
def action_confirm_distributor(self):
self.sudo().message_post(
body=_("Pedido confirmado por %s") % self.env.user.name,
subtype='mt_comment')
self = self.sudo()
return self.action_confirm()
@api.onchange('partner_id')
def onchange_partner_id_warning(self):
""" desactivamos warning para portal distributor
"""
if self.env.user.has_group(
'portal_sale_distributor.group_portal_distributor'):
return {}
else:
return super().onchange_partner_id_warning()
|
8353339f9a907767a6cb89d4e65497e7adb541d9
|
fridge/test/test_memoryfs.py
|
fridge/test/test_memoryfs.py
|
from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
|
from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
def test_close_flushes_content(self):
f = MemoryFile()
f.write('test')
f.close()
assert f.content == 'test'
def test_can_be_reopened_and_read(self):
f = MemoryFile()
f.write('test')
f.close()
f.open()
assert f.read() == 'test'
|
Add more tests for MemoryFile.
|
Add more tests for MemoryFile.
|
Python
|
mit
|
jgosmann/fridge,jgosmann/fridge
|
from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
Add more tests for MemoryFile.
|
from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
def test_close_flushes_content(self):
f = MemoryFile()
f.write('test')
f.close()
assert f.content == 'test'
def test_can_be_reopened_and_read(self):
f = MemoryFile()
f.write('test')
f.close()
f.open()
assert f.read() == 'test'
|
<commit_before>from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
<commit_msg>Add more tests for MemoryFile.<commit_after>
|
from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
def test_close_flushes_content(self):
f = MemoryFile()
f.write('test')
f.close()
assert f.content == 'test'
def test_can_be_reopened_and_read(self):
f = MemoryFile()
f.write('test')
f.close()
f.open()
assert f.read() == 'test'
|
from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
Add more tests for MemoryFile.from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
def test_close_flushes_content(self):
f = MemoryFile()
f.write('test')
f.close()
assert f.content == 'test'
def test_can_be_reopened_and_read(self):
f = MemoryFile()
f.write('test')
f.close()
f.open()
assert f.read() == 'test'
|
<commit_before>from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
<commit_msg>Add more tests for MemoryFile.<commit_after>from fridge.memoryfs import MemoryFile
class TestMemoryFile(object):
def test_can_be_written(self):
f = MemoryFile()
f.write('test')
f.flush()
assert f.content == 'test'
def test_close_flushes_content(self):
f = MemoryFile()
f.write('test')
f.close()
assert f.content == 'test'
def test_can_be_reopened_and_read(self):
f = MemoryFile()
f.write('test')
f.close()
f.open()
assert f.read() == 'test'
|
84fd94949e14fd259f20aaa262de269a6cd804f0
|
pwndbg/malloc.py
|
pwndbg/malloc.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return p + (2-pwndbg.arch.ptrsize)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return mem - (2*pwndbg.arch.ptrsize)
|
Correct the conversion of mem2chunk
|
Correct the conversion of mem2chunk
|
Python
|
mit
|
0xddaa/pwndbg,0xddaa/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,disconnect3d/pwndbg,disconnect3d/pwndbg,cebrusfs/217gdb,cebrusfs/217gdb,chubbymaggie/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,zachriggle/pwndbg,anthraxx/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,disconnect3d/pwndbg,chubbymaggie/pwndbg,zachriggle/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return p + (2-pwndbg.arch.ptrsize)
Correct the conversion of mem2chunk
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return mem - (2*pwndbg.arch.ptrsize)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return p + (2-pwndbg.arch.ptrsize)
<commit_msg>Correct the conversion of mem2chunk<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return mem - (2*pwndbg.arch.ptrsize)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return p + (2-pwndbg.arch.ptrsize)
Correct the conversion of mem2chunk#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return mem - (2*pwndbg.arch.ptrsize)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return p + (2-pwndbg.arch.ptrsize)
<commit_msg>Correct the conversion of mem2chunk<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Describes the EGLIBC heap mechanisms.
Work-in-progress.
"""
import pwndbg.arch
import pwndbg.events
did_warn_once = False
malloc_chunk = None
@pwndbg.events.new_objfile
def load_malloc_chunk():
malloc_chunk = None
def chunk2mem(p):
"conversion from malloc header to user pointer"
return p + (2*pwndbg.arch.ptrsize)
def mem2chunk(mem):
"conversion from user pointer to malloc header"
return mem - (2*pwndbg.arch.ptrsize)
|
4ba31b7c0cce69693df383cb875705d7e66c2945
|
admin/base/migrations/0001_groups.py
|
admin/base/migrations/0001_groups.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_admin')
if created:
logger.info('osf_admin group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
|
Add group with more future permissions
|
Add group with more future permissions
|
Python
|
apache-2.0
|
monikagrabowska/osf.io,leb2dg/osf.io,pattisdr/osf.io,zachjanicki/osf.io,doublebits/osf.io,jnayak1/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,mfraezz/osf.io,emetsger/osf.io,binoculars/osf.io,Nesiehr/osf.io,zamattiac/osf.io,amyshi188/osf.io,jnayak1/osf.io,pattisdr/osf.io,rdhyee/osf.io,crcresearch/osf.io,abought/osf.io,mluo613/osf.io,laurenrevere/osf.io,binoculars/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,mluke93/osf.io,adlius/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,emetsger/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,kwierman/osf.io,amyshi188/osf.io,aaxelb/osf.io,mluo613/osf.io,Nesiehr/osf.io,sloria/osf.io,kwierman/osf.io,cwisecarver/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,mattclark/osf.io,chennan47/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,chennan47/osf.io,felliott/osf.io,mluke93/osf.io,asanfilippo7/osf.io,billyhunt/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,abought/osf.io,alexschiller/osf.io,aaxelb/osf.io,alexschiller/osf.io,caneruguz/osf.io,emetsger/osf.io,alexschiller/osf.io,doublebits/osf.io,cslzchen/osf.io,caneruguz/osf.io,chrisseto/osf.io,kch8qx/osf.io,sloria/osf.io,DanielSBrown/osf.io,acshi/osf.io,hmoco/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,erinspace/osf.io,caneruguz/osf.io,samchrisinger/osf.io,zamattiac/osf.io,zamattiac/osf.io,chrisseto/osf.io,baylee-d/osf.io,SSJohns/osf.io,hmoco/osf.io,mluke93/osf.io,erinspace/osf.io,abought/osf.io,icereval/osf.io,SSJohns/osf.io,billyhunt/osf.io,mluo613/osf.io,wearpants/osf.io,adlius/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,sloria/osf.io,icereval/osf.io,monikagrabowska/osf.io,doublebits/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,saradbowman/osf.io,kch8qx/osf.io,RomanZWang/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,hmoco/osf.io,monikagrabowska/osf.io,felliott/osf.io,RomanZWang/osf.io,billyhunt/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,chennan47/osf.io,amyshi188/osf.io,mluke93/osf.io,billyhunt/osf.io,zachjanicki/osf.io,felliott/osf.io,RomanZWang/osf.io,SSJohns/osf.io,doublebits/osf.io,aaxelb/osf.io,cslzchen/osf.io,baylee-d/osf.io,leb2dg/osf.io,aaxelb/osf.io,adlius/osf.io,caseyrollins/osf.io,baylee-d/osf.io,amyshi188/osf.io,samchrisinger/osf.io,wearpants/osf.io,hmoco/osf.io,erinspace/osf.io,leb2dg/osf.io,icereval/osf.io,leb2dg/osf.io,mfraezz/osf.io,kwierman/osf.io,chrisseto/osf.io,wearpants/osf.io,mattclark/osf.io,cwisecarver/osf.io,binoculars/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,doublebits/osf.io,adlius/osf.io,mfraezz/osf.io,rdhyee/osf.io,acshi/osf.io,abought/osf.io,jnayak1/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,wearpants/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,felliott/osf.io,mattclark/osf.io,chrisseto/osf.io,mluo613/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,emetsger/osf.io
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
Add group with more future permissions
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_admin')
if created:
logger.info('osf_admin group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
<commit_msg>Add group with more future permissions<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_admin')
if created:
logger.info('osf_admin group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
Add group with more future permissions# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_admin')
if created:
logger.info('osf_admin group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
<commit_msg>Add group with more future permissions<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.contrib.auth.models import Group
import logging
logger = logging.getLogger(__file__)
def add_groups(*args):
print args
group, created = Group.objects.get_or_create(name='prereg_group')
if created:
logger.info('prereg_group created')
group, created = Group.objects.get_or_create(name='osf_admin')
if created:
logger.info('osf_admin group created')
group, created = Group.objects.get_or_create(name='osf_group')
if created:
logger.info('osf_group created')
class Migration(migrations.Migration):
operations = [
migrations.RunPython(add_groups),
]
|
faf35a814d045ce3d71921ed0d4ac268d5a9811c
|
app/notify_client/provider_client.py
|
app/notify_client/provider_client.py
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
Add provider client method to get provider version history
|
Add provider client method to get provider version history
|
Python
|
mit
|
gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
Add provider client method to get provider version history
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
<commit_before>
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
<commit_msg>Add provider client method to get provider version history<commit_after>
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
Add provider client method to get provider version history
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
<commit_before>
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
<commit_msg>Add provider client method to get provider version history<commit_after>
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
101bfdc1552922d4a58defcb622006c432381df6
|
contrib/examples/sensors/fibonacci_sensor.py
|
contrib/examples/sensors/fibonacci_sensor.py
|
from st2reactor.sensor.base import PollingSensor
from environ import get_environ
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": get_environ("PYTHONPATH")
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
|
import os
from st2reactor.sensor.base import PollingSensor
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": os.environ.get("PYTHONPATH", None)
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
|
Fix fibonacci sensor so it works under Python 3.
|
Fix fibonacci sensor so it works under Python 3.
|
Python
|
apache-2.0
|
nzlosh/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2
|
from st2reactor.sensor.base import PollingSensor
from environ import get_environ
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": get_environ("PYTHONPATH")
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
Fix fibonacci sensor so it works under Python 3.
|
import os
from st2reactor.sensor.base import PollingSensor
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": os.environ.get("PYTHONPATH", None)
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
|
<commit_before>from st2reactor.sensor.base import PollingSensor
from environ import get_environ
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": get_environ("PYTHONPATH")
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
<commit_msg>Fix fibonacci sensor so it works under Python 3.<commit_after>
|
import os
from st2reactor.sensor.base import PollingSensor
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": os.environ.get("PYTHONPATH", None)
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
|
from st2reactor.sensor.base import PollingSensor
from environ import get_environ
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": get_environ("PYTHONPATH")
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
Fix fibonacci sensor so it works under Python 3.import os
from st2reactor.sensor.base import PollingSensor
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": os.environ.get("PYTHONPATH", None)
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
|
<commit_before>from st2reactor.sensor.base import PollingSensor
from environ import get_environ
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": get_environ("PYTHONPATH")
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
<commit_msg>Fix fibonacci sensor so it works under Python 3.<commit_after>import os
from st2reactor.sensor.base import PollingSensor
class FibonacciSensor(PollingSensor):
def __init__(self, sensor_service, config,
poll_interval=5):
super(FibonacciSensor, self).__init__(
sensor_service=sensor_service,
config=config,
poll_interval=poll_interval
)
self.a = None
self.b = None
self.count = None
self.logger = None
def setup(self):
self.a = 0
self.b = 1
self.count = 2
self.logger = self.sensor_service.get_logger(name=self.__class__.__name__)
def poll(self):
fib = self.a + self.b
self.logger.debug('Count: %d, a: %d, b: %d', self.count, self.a, self.b)
payload = {
"count": self.count,
"fibonacci": fib,
"pythonpath": os.environ.get("PYTHONPATH", None)
}
self.sensor_service.dispatch(trigger="examples.fibonacci", payload=payload)
self.a = self.b
self.b = fib
self.count = self.count + 1
def cleanup(self):
pass
def add_trigger(self, trigger):
# This method is called when trigger is created
pass
def update_trigger(self, trigger):
# This method is called when trigger is updated
pass
def remove_trigger(self, trigger):
# This method is called when trigger is deleted
pass
|
6d5697a72793f50054fdfc268115fd8afb62969a
|
yunity/utils/tests/mock.py
|
yunity/utils/tests/mock.py
|
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockChat(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
administrated_by = SubFactory(MockUser)
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
|
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockConversation(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
|
Refactor MockConversation to new model
|
Refactor MockConversation to new model
Renamed MockChat to MockConversation, remove removed administratedBy
Trait
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/yunity-core,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend
|
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockChat(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
administrated_by = SubFactory(MockUser)
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
Refactor MockConversation to new model
Renamed MockChat to MockConversation, remove removed administratedBy
Trait
|
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockConversation(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
|
<commit_before>from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockChat(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
administrated_by = SubFactory(MockUser)
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
<commit_msg>Refactor MockConversation to new model
Renamed MockChat to MockConversation, remove removed administratedBy
Trait<commit_after>
|
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockConversation(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
|
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockChat(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
administrated_by = SubFactory(MockUser)
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
Refactor MockConversation to new model
Renamed MockChat to MockConversation, remove removed administratedBy
Traitfrom factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockConversation(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
|
<commit_before>from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockChat(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
administrated_by = SubFactory(MockUser)
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
<commit_msg>Refactor MockConversation to new model
Renamed MockChat to MockConversation, remove removed administratedBy
Trait<commit_after>from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, post_generation, SubFactory, PostGeneration
from yunity.utils.tests.fake import faker
class Mock(DjangoModelFactory):
class Meta:
strategy = CREATE_STRATEGY
model = None
abstract = True
class MockUser(Mock):
class Meta:
model = "yunity.User"
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
class MockConversation(Mock):
class Meta:
model = "yunity.Conversation"
strategy = CREATE_STRATEGY
@post_generation
def participants(self, created, participants, **kwargs):
if not created:
return
if participants:
for participant in participants:
self.participants.add(participant)
|
55f6b85e0c376ba56a2ce860fd8d33011c34bc7e
|
python/problem2.py
|
python/problem2.py
|
def fib(size=-1):
def inner():
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
return inner()
print sum((i for i in fib() if i % 2 == 0))
|
def fib(size=-1):
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
print sum((i for i in fib() if i % 2 == 0))
|
Update problem 2's generator function
|
Update problem 2's generator function
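For illustration, the first few values the flattened generator yields (this snippet assumes it is placed below the fib definition):
from itertools import islice
print(list(islice(fib(), 8)))  # [1, 2, 3, 5, 8, 13, 21, 34]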
|
Python
|
mit
|
jreese/euler,jreese/euler,jreese/euler,jreese/euler
|
def fib(size=-1):
def inner():
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
return inner()
print sum((i for i in fib() if i % 2 == 0))
Update problem 2's generator function
|
def fib(size=-1):
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
print sum((i for i in fib() if i % 2 == 0))
|
<commit_before>
def fib(size=-1):
def inner():
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
return inner()
print sum((i for i in fib() if i % 2 == 0))
<commit_msg>Update problem 2's generator function<commit_after>
|
def fib(size=-1):
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
print sum((i for i in fib() if i % 2 == 0))
|
def fib(size=-1):
def inner():
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
return inner()
print sum((i for i in fib() if i % 2 == 0))
Update problem 2's generator function
def fib(size=-1):
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
print sum((i for i in fib() if i % 2 == 0))
|
<commit_before>
def fib(size=-1):
def inner():
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
return inner()
print sum((i for i in fib() if i % 2 == 0))
<commit_msg>Update problem 2's generator function<commit_after>
def fib(size=-1):
count = 0
last = 0
current = 1
while size < 0 or count < size:
last, current = current, last + current
if current > 4000000:
break
count += 1
yield current
print sum((i for i in fib() if i % 2 == 0))
|
776861eeed4244185592f8bda6dea4cb5540423d
|
cpt/__init__.py
|
cpt/__init__.py
|
__version__ = '0.34.5-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
|
__version__ = '0.35.0-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
|
Update development version to 0.35.0
|
Update development version to 0.35.0
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
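A small sketch of how the bumped package version and the detected Conan client version might be reported together (illustrative only, assumes the conans client is installed):
from cpt import __version__, get_client_version
print("conan-package-tools %s / conan client %s" % (__version__, get_client_version()))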
|
Python
|
mit
|
conan-io/conan-package-tools
|
__version__ = '0.34.5-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
Update development version to 0.35.0
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
|
__version__ = '0.35.0-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
|
<commit_before>
__version__ = '0.34.5-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
<commit_msg>Update development version to 0.35.0
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>
|
__version__ = '0.35.0-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
|
__version__ = '0.34.5-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
Update development version to 0.35.0
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
__version__ = '0.35.0-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
|
<commit_before>
__version__ = '0.34.5-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
<commit_msg>Update development version to 0.35.0
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>
__version__ = '0.35.0-dev'
def get_client_version():
from conans.model.version import Version
from conans import __version__ as client_version
from os import getenv
# It is a mess comparing dev versions, lets assume that the -dev is the further release
return Version(client_version.replace("-dev", ""))
|
f4851040b74a0c88980a1e82a8b518bd6147f508
|
FF4P/Abilities.py
|
FF4P/Abilities.py
|
import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
Fix Filename Errors: Module folder had changed at some point in the past, fixed the file path so it could find the CSV
|
Fix Filename Errors
Module folder had changed at some point in the past, fixed the file
path so it could find the CSV
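An illustrative lookup once the corrected path resolves; the ability name "Fire" is a placeholder and FF4P is assumed to be importable as a package:
from FF4P.Abilities import getAbility
ability = getAbility("Fire")
if ability != ["none"]:
    print("Found ability row: %s" % ability)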
|
Python
|
mit
|
einSynd/PyIRC
|
import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return noneFix Filename Errors
Module folder had changed at some point in the past, fixed the file
path so it could find the CSV
|
import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
<commit_before>import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none<commit_msg>Fix Filename Errors
Module folder had changed at some point in the past, fixed the file
path so it could find the CSV<commit_after>
|
import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return noneFix Filename Errors
Module folder had changed at some point in the past, fixed the file
path so it could find the CSVimport os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
<commit_before>import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none<commit_msg>Fix Filename Errors
Module folder had changed at some point in the past, fixed the file
path so it could find the CSV<commit_after>import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
cc754aeb16aa41f936d59a3b5746a3bec69489ef
|
sts/util/convenience.py
|
sts/util/convenience.py
|
import time
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
|
import time
def is_sorted(l):
return all(l[i] <= l[i+1] for i in xrange(len(l)-1))
def is_strictly_sorted(l):
return all(l[i] < l[i+1] for i in xrange(len(l)-1))
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
|
Add little functions for checking if a list is sorted without sorting it
|
Add little functions for checking if a list is sorted without sorting it
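The difference between the two helpers in one example (assumes they are in scope, e.g. imported from sts.util.convenience):
print(is_sorted([1, 2, 2, 3]))           # True, non-decreasing is enough
print(is_strictly_sorted([1, 2, 2, 3]))  # False, the repeated 2 breaks strictness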
|
Python
|
apache-2.0
|
ucb-sts/sts,jmiserez/sts,jmiserez/sts,ucb-sts/sts
|
import time
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
Add little functions for checking if a list is sorted without sorting it
|
import time
def is_sorted(l):
return all(l[i] <= l[i+1] for i in xrange(len(l)-1))
def is_strictly_sorted(l):
return all(l[i] < l[i+1] for i in xrange(len(l)-1))
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
|
<commit_before>import time
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
<commit_msg>Add little functions for checking if a list is sorted without sorting it<commit_after>
|
import time
def is_sorted(l):
return all(l[i] <= l[i+1] for i in xrange(len(l)-1))
def is_strictly_sorted(l):
return all(l[i] < l[i+1] for i in xrange(len(l)-1))
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
|
import time
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
Add little functions for checking if a list is sorted without sorting itimport time
def is_sorted(l):
return all(l[i] <= l[i+1] for i in xrange(len(l)-1))
def is_strictly_sorted(l):
return all(l[i] < l[i+1] for i in xrange(len(l)-1))
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
|
<commit_before>import time
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
<commit_msg>Add little functions for checking if a list is sorted without sorting it<commit_after>import time
def is_sorted(l):
return all(l[i] <= l[i+1] for i in xrange(len(l)-1))
def is_strictly_sorted(l):
return all(l[i] < l[i+1] for i in xrange(len(l)-1))
def timestamp_string():
return time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
def find(f, seq):
"""Return first item in sequence where f(item) == True."""
for item in seq:
if f(item):
return item
def find_index(f, seq):
"""Return the index of the first item in sequence where f(item) == True."""
for index, item in enumerate(seq):
if f(item):
return index
|
a8ee8b389359f67a4e0eb0891ccb2278608e3df0
|
openacademy/model/openacademy_session.py
|
openacademy/model/openacademy_session.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher")
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
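The same filter written as an explicit search, as a sketch only; it assumes it runs inside a model method with access to self.env:
# partners flagged as instructors OR whose category matches "Teacher"
domain = ['|', ('instructor', '=', True), ('category_id', 'ilike', 'Teacher')]
teachers = self.env['res.partner'].search(domain)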
|
Python
|
apache-2.0
|
hellomoto6/openacademy
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
[REF] openacademy: Add domain or and ilike
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher")
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
<commit_before># -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
<commit_msg>[REF] openacademy: Add domain or and ilike<commit_after>
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher")
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
[REF] openacademy: Add domain or and ilike# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher")
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
<commit_before># -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
<commit_msg>[REF] openacademy: Add domain or and ilike<commit_after># -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher")
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
529f719555a42bbdfe74d678ef9839ed7377bcf1
|
motor.py
|
motor.py
|
import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control(sys.argv[1])
|
import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control('init')
control(sys.argv[1])
|
Add init process as default
|
Add init process as default
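With init folded into the script entry point, one shell call now sets up the pins before driving the motor; used as a module, 'init' still has to be called explicitly. Sketch only, assumes RPi.GPIO is available:
# shell usage: python motor.py forward   (no separate 'init' invocation needed any more)
import motor
motor.control('init')
motor.control('forward')
motor.control('stop')
motor.control('quit')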
|
Python
|
apache-2.0
|
hideo54/R2-D2,hideo54/R2-D2,hideo54/R2-D2
|
import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control(sys.argv[1])
Add init process as default
|
import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control('init')
control(sys.argv[1])
|
<commit_before>import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control(sys.argv[1])
<commit_msg>Add init process as default<commit_after>
|
import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control('init')
control(sys.argv[1])
|
import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control(sys.argv[1])
Add init process as defaultimport sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control('init')
control(sys.argv[1])
|
<commit_before>import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control(sys.argv[1])
<commit_msg>Add init process as default<commit_after>import sys
import RPi.GPIO as GPIO
# Register Pin number
enable1 = 22
input1a = 18
input1b = 16
def control(arg):
if arg == 'init':
GPIO.setmode(GPIO.BOARD)
GPIO.setup(enable1, GPIO.OUT)
GPIO.setup(input1a, GPIO.OUT)
GPIO.setup(input1b, GPIO.OUT)
elif arg == 'forward':
GPIO.output(enable1, True)
GPIO.output(input1a, True)
GPIO.output(input1b, False)
elif arg == 'backward':
GPIO.output(enable1, True)
GPIO.output(input1a, False)
GPIO.output(input1b, True)
elif arg == 'stop':
GPIO.output(enable1, False)
elif arg == 'quit':
GPIO.cleanup()
else:
print 'No such command: ' + arg
if __name__ == '__main__':
control('init')
control(sys.argv[1])
|
b69170a0ab629f0e11d66ed71857989db1f647f9
|
scripts/analytics/institutions.py
|
scripts/analytics/institutions.py
|
from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_user_count_by_institutions():
institutions = get_institutions()
user_counts = []
for institution in institutions:
query = Q('_affiliated_institutions', 'eq', institution.node)
user_counts.append({institution.name: User.find(query).count()})
return user_counts
def get_node_count_by_institutions():
institutions = get_institutions()
node_counts = []
for institution in institutions:
query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
node_counts.append({institution.name: Node.find_by_institutions(institution, query).count()})
return node_counts
def main():
users_by_institutions = get_user_count_by_institutions()
nodes_by_institutions = get_node_count_by_institutions()
print(users_by_institutions)
print(nodes_by_institutions)
if __name__ == '__main__':
init_app()
main()
|
from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_count_by_institutions():
institutions = get_institutions()
counts = []
for institution in institutions:
user_query = Q('_affiliated_institutions', 'eq', institution.node)
node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
count = {
'institution': institution.name,
'users': User.find(user_query).count(),
'nodes': Node.find_by_institutions(institution, node_query).count(),
}
counts.append(count)
keen_payload = {'institution_analytics': counts}
return keen_payload
def main():
counts_by_institutions = get_count_by_institutions()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
read_key = keen_settings['private']['read_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
read_key=read_key
)
client.add_events(counts_by_institutions)
else:
print(counts_by_institutions)
if __name__ == '__main__':
init_app()
main()
|
Update script to work with Keen
|
Update script to work with Keen
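The payload handed to KeenClient.add_events has this shape; institution name and counts are illustrative:
example_payload = {
    'institution_analytics': [
        {'institution': 'Example University', 'users': 120, 'nodes': 45},
    ]
}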
|
Python
|
apache-2.0
|
leb2dg/osf.io,alexschiller/osf.io,cslzchen/osf.io,mluo613/osf.io,alexschiller/osf.io,caneruguz/osf.io,mluo613/osf.io,felliott/osf.io,adlius/osf.io,aaxelb/osf.io,cslzchen/osf.io,alexschiller/osf.io,pattisdr/osf.io,chennan47/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,aaxelb/osf.io,Nesiehr/osf.io,chrisseto/osf.io,pattisdr/osf.io,aaxelb/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,adlius/osf.io,caneruguz/osf.io,icereval/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,adlius/osf.io,pattisdr/osf.io,sloria/osf.io,emetsger/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,hmoco/osf.io,rdhyee/osf.io,emetsger/osf.io,mfraezz/osf.io,cwisecarver/osf.io,emetsger/osf.io,icereval/osf.io,monikagrabowska/osf.io,acshi/osf.io,cwisecarver/osf.io,felliott/osf.io,mattclark/osf.io,Nesiehr/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,mattclark/osf.io,acshi/osf.io,chrisseto/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,sloria/osf.io,cslzchen/osf.io,mluo613/osf.io,brianjgeiger/osf.io,hmoco/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,mfraezz/osf.io,binoculars/osf.io,icereval/osf.io,saradbowman/osf.io,chennan47/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,mluo613/osf.io,laurenrevere/osf.io,cslzchen/osf.io,hmoco/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,mfraezz/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,baylee-d/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,crcresearch/osf.io,leb2dg/osf.io,mluo613/osf.io,sloria/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,hmoco/osf.io,baylee-d/osf.io,caneruguz/osf.io,leb2dg/osf.io,laurenrevere/osf.io,acshi/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,erinspace/osf.io,chrisseto/osf.io,acshi/osf.io,felliott/osf.io,monikagrabowska/osf.io,binoculars/osf.io,alexschiller/osf.io,binoculars/osf.io,chrisseto/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,crcresearch/osf.io,adlius/osf.io,rdhyee/osf.io,cwisecarver/osf.io,chennan47/osf.io,felliott/osf.io,TomBaxter/osf.io,erinspace/osf.io
|
from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_user_count_by_institutions():
institutions = get_institutions()
user_counts = []
for institution in institutions:
query = Q('_affiliated_institutions', 'eq', institution.node)
user_counts.append({institution.name: User.find(query).count()})
return user_counts
def get_node_count_by_institutions():
institutions = get_institutions()
node_counts = []
for institution in institutions:
query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
node_counts.append({institution.name: Node.find_by_institutions(institution, query).count()})
return node_counts
def main():
users_by_institutions = get_user_count_by_institutions()
nodes_by_institutions = get_node_count_by_institutions()
print(users_by_institutions)
print(nodes_by_institutions)
if __name__ == '__main__':
init_app()
main()
Update script to work with Keen
|
from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_count_by_institutions():
institutions = get_institutions()
counts = []
for institution in institutions:
user_query = Q('_affiliated_institutions', 'eq', institution.node)
node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
count = {
'institution': institution.name,
'users': User.find(user_query).count(),
'nodes': Node.find_by_institutions(institution, node_query).count(),
}
counts.append(count)
keen_payload = {'institution_analytics': counts}
return keen_payload
def main():
counts_by_institutions = get_count_by_institutions()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
read_key = keen_settings['private']['read_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
read_key=read_key
)
client.add_events(counts_by_institutions)
else:
print(counts_by_institutions)
if __name__ == '__main__':
init_app()
main()
|
<commit_before>from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_user_count_by_institutions():
institutions = get_institutions()
user_counts = []
for institution in institutions:
query = Q('_affiliated_institutions', 'eq', institution.node)
user_counts.append({institution.name: User.find(query).count()})
return user_counts
def get_node_count_by_institutions():
institutions = get_institutions()
node_counts = []
for institution in institutions:
query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
node_counts.append({institution.name: Node.find_by_institutions(institution, query).count()})
return node_counts
def main():
users_by_institutions = get_user_count_by_institutions()
nodes_by_institutions = get_node_count_by_institutions()
print(users_by_institutions)
print(nodes_by_institutions)
if __name__ == '__main__':
init_app()
main()
<commit_msg>Update script to work with Keen<commit_after>
|
from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_count_by_institutions():
institutions = get_institutions()
counts = []
for institution in institutions:
user_query = Q('_affiliated_institutions', 'eq', institution.node)
node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
count = {
'institution': institution.name,
'users': User.find(user_query).count(),
'nodes': Node.find_by_institutions(institution, node_query).count(),
}
counts.append(count)
keen_payload = {'institution_analytics': counts}
return keen_payload
def main():
counts_by_institutions = get_count_by_institutions()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
read_key = keen_settings['private']['read_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
read_key=read_key
)
client.add_events(counts_by_institutions)
else:
print(counts_by_institutions)
if __name__ == '__main__':
init_app()
main()
|
from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_user_count_by_institutions():
institutions = get_institutions()
user_counts = []
for institution in institutions:
query = Q('_affiliated_institutions', 'eq', institution.node)
user_counts.append({institution.name: User.find(query).count()})
return user_counts
def get_node_count_by_institutions():
institutions = get_institutions()
node_counts = []
for institution in institutions:
query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
node_counts.append({institution.name: Node.find_by_institutions(institution, query).count()})
return node_counts
def main():
users_by_institutions = get_user_count_by_institutions()
nodes_by_institutions = get_node_count_by_institutions()
print(users_by_institutions)
print(nodes_by_institutions)
if __name__ == '__main__':
init_app()
main()
Update script to work with Keenfrom modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_count_by_institutions():
institutions = get_institutions()
counts = []
for institution in institutions:
user_query = Q('_affiliated_institutions', 'eq', institution.node)
node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
count = {
'institution': institution.name,
'users': User.find(user_query).count(),
'nodes': Node.find_by_institutions(institution, node_query).count(),
}
counts.append(count)
keen_payload = {'institution_analytics': counts}
return keen_payload
def main():
counts_by_institutions = get_count_by_institutions()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
read_key = keen_settings['private']['read_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
read_key=read_key
)
client.add_events(counts_by_institutions)
else:
print(counts_by_institutions)
if __name__ == '__main__':
init_app()
main()
|
<commit_before>from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_user_count_by_institutions():
institutions = get_institutions()
user_counts = []
for institution in institutions:
query = Q('_affiliated_institutions', 'eq', institution.node)
user_counts.append({institution.name: User.find(query).count()})
return user_counts
def get_node_count_by_institutions():
institutions = get_institutions()
node_counts = []
for institution in institutions:
query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
node_counts.append({institution.name: Node.find_by_institutions(institution, query).count()})
return node_counts
def main():
users_by_institutions = get_user_count_by_institutions()
nodes_by_institutions = get_node_count_by_institutions()
print(users_by_institutions)
print(nodes_by_institutions)
if __name__ == '__main__':
init_app()
main()
<commit_msg>Update script to work with Keen<commit_after>from modularodm import Q
from website.app import init_app
from website.models import User, Node, Institution
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
def get_institutions():
institutions = Institution.find(Q('_id', 'ne', None))
return institutions
def get_count_by_institutions():
institutions = get_institutions()
counts = []
for institution in institutions:
user_query = Q('_affiliated_institutions', 'eq', institution.node)
node_query = (
Q('is_deleted', 'ne', True) &
Q('is_folder', 'ne', True) &
Q('parent_node', 'eq', None)
)
count = {
'institution': institution.name,
'users': User.find(user_query).count(),
'nodes': Node.find_by_institutions(institution, node_query).count(),
}
counts.append(count)
keen_payload = {'institution_analytics': counts}
return keen_payload
def main():
counts_by_institutions = get_count_by_institutions()
keen_project = keen_settings['private']['project_id']
write_key = keen_settings['private']['write_key']
read_key = keen_settings['private']['read_key']
if keen_project and write_key:
client = KeenClient(
project_id=keen_project,
write_key=write_key,
read_key=read_key
)
client.add_events(counts_by_institutions)
else:
print(counts_by_institutions)
if __name__ == '__main__':
init_app()
main()
|
ccbc40f5bfa160a9e41de86fc4845d68da40b8c4
|
parse.py
|
parse.py
|
import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
|
import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
Update variable names, add better comments, convert to JSON.
|
Update variable names, add better comments, convert to JSON.
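One element of the emitted JSON array, with made-up values for illustration:
import json
sample = [{"id": "Example Corp", "oui": "00A0C9", "address": "1 Example Way Somewhere US 00000"}]
print(json.dumps(sample))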
|
Python
|
isc
|
reillysiemens/macdb
|
import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
Update variable names, add better comments, convert to JSON.
|
import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
<commit_before>import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
<commit_msg>Update variable names, add better comments, convert to JSON.<commit_after>
|
import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
Update variable names, add better comments, convert to JSON.import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
<commit_before>import re
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
number_name = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
oui_hash = hashlib.sha1()
companies = []
# Get the listing from the source location.
req = requests.get(location)
# Update our hash object with the value from our request string.
oui_hash.update(bytes(req.text, "utf-8"))
# Ignore the first 127 characters of junk data.
req_string = req.text[127:]
# Break the request string into a list of entries.
entries = req_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
for entry in entries:
lines = entry.split('\r\n')
matches = number_name.search(lines[1])
company = {'name': matches.group(2), 'oui': matches.group(1)}
companies.append(company)
<commit_msg>Update variable names, add better comments, convert to JSON.<commit_after>import re
import json
import hashlib
import requests
location = "http://www.ieee.org/netstorage/standards/oui.txt"
oui_id = re.compile(" *(\w{6}) *\(.*\)[^\w]+(.*)$")
request_hash = hashlib.sha1()
organizations = []
# Get the listing from the source location.
request = requests.get(location)
# Update our hash object with the value from our request string.
request_hash.update(bytes(request.text, "utf-8"))
# Ignore the first 127 characters of junk data.
request_string = request.text[127:]
# Break the request string into a list of entries.
entries = request_string.split('\r\n\r\n')
# Remove junk entry at the end.
del entries[-1]
# For each entry...
for entry in entries:
# Break the entry into lines.
lines = entry.split('\r\n')
# Find the id and oui for the organization.
matches = oui_id.search(lines[1])
# Find the address for the organization.
address = re.sub('\s+', ' ', ' '.join(lines[2:]).strip())
# Create a dictionary for the organization.
organization = {'id': matches.group(2),
'oui': matches.group(1),
'address': address}
# Append that dictionary to our list of organizations.
organizations.append(organization)
# Convert the list of organizations to a JSON formatted string.
json_organizations = json.dumps(organizations)
print(json_organizations)
|
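A small usage sketch for the parser above (the sample record and the lookup helper are illustrative assumptions, not part of the commit): the JSON string printed by parse.py can be loaded and used to resolve a MAC address prefix to its registered organization.
import json
# Hypothetical sample in the same shape as the JSON printed by parse.py.
json_organizations = json.dumps([
    {'id': 'Example Corp', 'oui': '001122', 'address': '1 Example Rd Example City'},
])
def organization_for_mac(mac, organizations):
    # The OUI is the first three octets of the MAC address as upper-case hex.
    oui = ''.join(c for c in mac.upper() if c.isalnum())[:6]
    return next((org for org in organizations if org['oui'] == oui), None)
organizations = json.loads(json_organizations)
print(organization_for_mac('00:11:22:33:44:55', organizations))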
6bd46c60569f8b358eafee568194b797be5020e1
|
scent.py
|
scent.py
|
from subprocess import call
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
return call(fn) == 0
|
from subprocess import Popen
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
process = Popen(fn)
try:
return process.wait() == 0
except KeyboardInterrupt:
process.terminate()
raise
|
Kill Nose when ^C in sniffer
|
Kill Nose when ^C in sniffer
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from subprocess import call
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
return call(fn) == 0
Kill Nose when ^C in sniffer
|
from subprocess import Popen
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
process = Popen(fn)
try:
return process.wait() == 0
except KeyboardInterrupt:
process.terminate()
raise
|
<commit_before>from subprocess import call
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
return call(fn) == 0
<commit_msg>Kill Nose when ^C in sniffer<commit_after>
|
from subprocess import Popen
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
process = Popen(fn)
try:
return process.wait() == 0
except KeyboardInterrupt:
process.terminate()
raise
|
from subprocess import call
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
return call(fn) == 0
Kill Nose when ^C in snifferfrom subprocess import Popen
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
process = Popen(fn)
try:
return process.wait() == 0
except KeyboardInterrupt:
process.terminate()
raise
|
<commit_before>from subprocess import call
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
return call(fn) == 0
<commit_msg>Kill Nose when ^C in sniffer<commit_after>from subprocess import Popen
from sniffer.api import runnable
@runnable
def execute_tests(*args):
fn = ['python', 'manage.py', 'test', '--noinput', '--settings=testsettings']
fn += args[1:]
process = Popen(fn)
try:
return process.wait() == 0
except KeyboardInterrupt:
process.terminate()
raise
|
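The interrupt-handling pattern introduced above is easy to exercise on its own; a minimal sketch (the sleeping child process is only a stand-in for the test run) that waits on a subprocess and terminates it when the parent receives Ctrl-C:
import sys
from subprocess import Popen
def run_child(cmd):
    # Wait for the child; on Ctrl-C, terminate it so it does not outlive us.
    process = Popen(cmd)
    try:
        return process.wait() == 0
    except KeyboardInterrupt:
        process.terminate()
        process.wait()  # reap the terminated child before re-raising
        raise
if __name__ == '__main__':
    run_child([sys.executable, '-c', 'import time; time.sleep(60)'])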
46a69b1795a5946c815c16a7d910d8c680e1ed7f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'Django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Correct spelling of Django in requirements
|
Correct spelling of Django in requirements
It seems that using 'django' instead of 'Django' means that "pip install django_debug_toolbar" installs the latest version of Django, even if you already have Django installed.
|
Python
|
bsd-3-clause
|
megcunningham/django-debug-toolbar,jazzband/django-debug-toolbar,pevzi/django-debug-toolbar,Endika/django-debug-toolbar,barseghyanartur/django-debug-toolbar,peap/django-debug-toolbar,tim-schilling/django-debug-toolbar,tim-schilling/django-debug-toolbar,barseghyanartur/django-debug-toolbar,jazzband/django-debug-toolbar,barseghyanartur/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,spookylukey/django-debug-toolbar,seperman/django-debug-toolbar,peap/django-debug-toolbar,Endika/django-debug-toolbar,peap/django-debug-toolbar,spookylukey/django-debug-toolbar,pevzi/django-debug-toolbar,seperman/django-debug-toolbar,calvinpy/django-debug-toolbar,calvinpy/django-debug-toolbar,pevzi/django-debug-toolbar,calvinpy/django-debug-toolbar,megcunningham/django-debug-toolbar,tim-schilling/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,Endika/django-debug-toolbar,jazzband/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,spookylukey/django-debug-toolbar,megcunningham/django-debug-toolbar,seperman/django-debug-toolbar
|
from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Correct spelling of Django in requirements
It seems that using 'django' instead of 'Django' means that "pip install django_debug_toolbar" installs the latest version of Django, even if you already have Django installed.
|
from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'Django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Correct spelling of Django in requirements
It seems that using 'django' instead of 'Django' means that "pip install django_debug_toolbar" installs the latest version of Django, even if you already have Django installed.<commit_after>
|
from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'Django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Correct spelling of Django in requirements
It seems that using 'django' instead of 'Django' means that "pip install django_debug_toolbar" installs the latest version of Django, even if you already have Django installed.from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'Django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Correct spelling of Django in requirements
It seems that using 'django' instead of 'Django' means that "pip install django_debug_toolbar" installs the latest version of Django, even if you already have Django installed.<commit_after>from setuptools import setup, find_packages
from io import open
setup(
name='django-debug-toolbar',
version='1.3.2',
description='A configurable set of panels that display various debug '
'information about the current request/response.',
long_description=open('README.rst', encoding='utf-8').read(),
author='Rob Hudson',
author_email='rob@cogit8.org',
url='https://github.com/django-debug-toolbar/django-debug-toolbar',
download_url='https://pypi.python.org/pypi/django-debug-toolbar',
license='BSD',
packages=find_packages(exclude=('tests.*', 'tests', 'example')),
install_requires=[
'Django>=1.4.2',
'sqlparse',
],
include_package_data=True,
zip_safe=False, # because we're including static files
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
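For context on the change above (an aside, not part of the commit): current packaging tools compare project names after PEP 503 normalization, under which 'django' and 'Django' name the same distribution. The normalization rule is short enough to sketch:
import re
def canonicalize_name(name):
    # PEP 503: lowercase and collapse runs of '-', '_' and '.' into a single '-'.
    return re.sub(r'[-_.]+', '-', name).lower()
print(canonicalize_name('Django'))                                  # django
print(canonicalize_name('django') == canonicalize_name('Django'))   # True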
1efabe64683240209ce7cdb7dd3064c8bcabbdc7
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
|
#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib',
'skimage'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
|
Add skimage to install_requires list
|
Add skimage to install_requires list
|
Python
|
mit
|
amdouglas/OpenPNM,stadelmanma/OpenPNM,TomTranter/OpenPNM,amdouglas/OpenPNM,PMEAL/OpenPNM
|
#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
Add skimage to install_requires list
|
#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib',
'skimage'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
|
<commit_before>#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
<commit_msg>Add skimage to install_requires list<commit_after>
|
#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib',
'skimage'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
|
#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
Add skimage to install_requires list#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib',
'skimage'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
|
<commit_before>#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
<commit_msg>Add skimage to install_requires list<commit_after>#!/usr/bin/env python3
import os
import sys
import OpenPNM
sys.path.append(os.getcwd())
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'OpenPNM',
description = 'A framework for conducting pore network modeling simulations of multiphase transport in porous materials.',
version = OpenPNM.__version__,
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Physics'
],
packages = [
'OpenPNM',
'OpenPNM.Base',
'OpenPNM.Network',
'OpenPNM.Network.models',
'OpenPNM.Geometry',
'OpenPNM.Geometry.models',
'OpenPNM.Phases',
'OpenPNM.Phases.models',
'OpenPNM.Physics',
'OpenPNM.Physics.models',
'OpenPNM.Utilities',
'OpenPNM.Algorithms',
'OpenPNM.Postprocessing'
],
install_requires = [
'numpy',
'scipy>=0.14.0',
'matplotlib',
'skimage'
],
author = 'OpenPNM Team',
author_email = 'jeff.gostick@mcgill.ca',
download_url = 'https://github.com/pmeal/OpenPNM/',
url = 'https://github.com/pmeal/OpenPNM'
)
|
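One caveat about the new requirement (an observation, not part of the commit): the library is imported as skimage, but the distribution on PyPI is published as scikit-image, so a requirement spelled 'skimage' may not install the intended package. A guarded import is one way to surface a clearer message at runtime:
try:
    import skimage  # import name; the PyPI distribution is 'scikit-image'
except ImportError as error:
    raise ImportError(
        "scikit-image is required; install it with 'pip install scikit-image'"
    ) from error
print(skimage.__version__)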
5d8d90ffea97f30994a7ff5654f485436a691cde
|
setup.py
|
setup.py
|
from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/johnsdea/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
|
from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/dean/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
|
Update url with new Github username.
|
Update url with new Github username.
|
Python
|
mpl-2.0
|
dean/hamper-remindme
|
from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/johnsdea/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
Update url with new Github username.
|
from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/dean/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
|
<commit_before>from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/johnsdea/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
<commit_msg>Update url with new Github username.<commit_after>
|
from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/dean/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
|
from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/johnsdea/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
Update url with new Github username.from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/dean/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
|
<commit_before>from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/johnsdea/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
<commit_msg>Update url with new Github username.<commit_after>from distutils.core import setup
with open('requirements.txt') as f:
requirements = [l.strip() for l in f]
setup(
name='hamper-remindme',
version='0.1',
packages=['hamper-remindme'],
author='Dean Johnson',
author_email='deanjohnson222@gmail.com',
url='https://github.com/dean/hamper-remindme',
install_requires=requirements,
package_data={'hamper-remindme': ['requirements.txt', 'README.md', 'LICENSE']}
)
|
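A side note on the requirements-reading idiom in this setup.py (a suggestion, not part of the commit): filtering out blank lines and comments keeps empty strings out of install_requires.
def read_requirements(path='requirements.txt'):
    # Strip whitespace, then drop blank lines and '#' comment lines.
    with open(path) as f:
        lines = (line.strip() for line in f)
        return [line for line in lines if line and not line.startswith('#')]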
1ef2eadd317172c9d3d51c30c7e424a99ce47a05
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
Add some final trove classifiers to help document the project
|
Add some final trove classifiers to help document the project
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Operating System :: OS Independent
|
Python
|
mit
|
uiri/toml,uiri/toml
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
Add some final trove classifiers to help document the project
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Operating System :: OS Independent
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
<commit_msg>Add some final trove classifiers to help document the project
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Operating System :: OS Independent<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
Add some final trove classifiers to help document the project
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Operating System :: OS Independenttry:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
<commit_msg>Add some final trove classifiers to help document the project
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
Operating System :: OS Independent<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="William Pearson",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
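Trove classifiers are plain strings, so a tiny stdlib check (illustrative only, not part of the commit) can keep a list like the one above free of duplicates and in sorted order as entries for new Python versions are added:
classifiers = [
    'Development Status :: 5 - Production/Stable',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
]
# Both checks rely only on built-ins, so they can run in CI or a pre-commit hook.
assert len(classifiers) == len(set(classifiers)), 'duplicate classifier'
assert classifiers == sorted(classifiers), 'classifiers are not sorted'
print('classifier list looks consistent')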
43ad3b2d2e25b816d6d7b339d62e674541d76712
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@50d6124a3e3fcd2a234b3373831075390b886a15#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
|
from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@c421063aeff60c316693756da3477634b8551f18#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
|
Update dependency link for parcel and recent DTT-99 fix
|
Update dependency link for parcel and recent DTT-99 fix
|
Python
|
apache-2.0
|
NCI-GDC/gdc-client,NCI-GDC/gdc-client
|
from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@50d6124a3e3fcd2a234b3373831075390b886a15#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
Update dependency link for parcel and recent DTT-99 fix
|
from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@c421063aeff60c316693756da3477634b8551f18#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
|
<commit_before>from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@50d6124a3e3fcd2a234b3373831075390b886a15#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
<commit_msg>Update dependency link for parcel and recent DTT-99 fix<commit_after>
|
from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@c421063aeff60c316693756da3477634b8551f18#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
|
from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@50d6124a3e3fcd2a234b3373831075390b886a15#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
Update dependency link for parcel and recent DTT-99 fixfrom setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@c421063aeff60c316693756da3477634b8551f18#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
|
<commit_before>from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@50d6124a3e3fcd2a234b3373831075390b886a15#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
<commit_msg>Update dependency link for parcel and recent DTT-99 fix<commit_after>from setuptools import setup, find_packages
from gdc_client.version import __version__
setup(
name="gdc_client",
version=__version__,
packages=find_packages(),
package_data={},
install_requires=[
'parcel',
'lxml==3.5.0b1',
'PyYAML==3.11',
'jsonschema==2.5.1',
'pyOpenSSL==17.1.0',
'ndg-httpsclient==0.4.2',
'pyasn1==0.2.3',
],
dependency_links=[
'git+https://github.com/LabAdvComp/parcel.git@c421063aeff60c316693756da3477634b8551f18#egg=parcel',
],
scripts=[
'bin/gdc-client',
],
)
|
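A side note on the pattern in the gdc-client record above: dependency_links pointing at a pinned git commit is deprecated and ignored by recent pip releases. A minimal sketch of the equivalent PEP 508 direct-URL requirement (package name and URL taken from the record; everything else is assumed and trimmed):
from setuptools import setup, find_packages
setup(
    name="gdc_client",
    packages=find_packages(),
    install_requires=[
        # direct URL requirement replaces dependency_links + bare 'parcel'
        "parcel @ git+https://github.com/LabAdvComp/parcel.git@c421063aeff60c316693756da3477634b8551f18",
        "lxml==3.5.0b1",
    ],
)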
662dd57b0bf761d8028a0b0edf107da8cf1055df
|
setup.py
|
setup.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.0',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.1',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
Make required py_zipkin v0.8.1 for accurate server send timings
|
Make required py_zipkin v0.8.1 for accurate server send timings
|
Python
|
apache-2.0
|
Yelp/pyramid_zipkin,bplotnick/pyramid_zipkin
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.0',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
Make required py_zipkin v0.8.1 for accurate server send timings
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.1',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.0',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
<commit_msg>Make required py_zipkin v0.8.1 for accurate server send timings<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.1',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.0',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
Make required py_zipkin v0.8.1 for accurate server send timings#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.1',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.0',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
<commit_msg>Make required py_zipkin v0.8.1 for accurate server send timings<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
__version__ = '0.19.1'
setup(
name='pyramid_zipkin',
version=__version__,
provides=["pyramid_zipkin"],
author='Yelp, Inc.',
author_email='opensource+pyramid-zipkin@yelp.com',
license='Copyright Yelp 2016',
url="https://github.com/Yelp/pyramid_zipkin",
description='Zipkin instrumentation for the Pyramid framework.',
packages=find_packages(exclude=('tests*',)),
package_data={'': ['*.thrift']},
install_requires=[
'py_zipkin >= 0.8.1',
'pyramid',
'six',
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
)
|
8afe9dc1e1bc5e632d6487b7a86a0df1bc73d154
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu'],
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu', 'kitsu.http'],
zip_safe=True,
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Add kitsu.http to packages and mark it zip_safe
|
Add kitsu.http to packages and mark it zip_safe
|
Python
|
mit
|
snaury/kitsu.http,snaury/kitsu.http
|
import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu'],
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add kitsu.http to packages and mark it zip_safe
|
import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu', 'kitsu.http'],
zip_safe=True,
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu'],
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add kitsu.http to packages and mark it zip_safe<commit_after>
|
import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu', 'kitsu.http'],
zip_safe=True,
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu'],
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add kitsu.http to packages and mark it zip_safeimport os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu', 'kitsu.http'],
zip_safe=True,
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu'],
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add kitsu.http to packages and mark it zip_safe<commit_after>import os
from setuptools import setup
def read(name):
return open(os.path.join(os.path.dirname(__file__), name), 'r').read()
setup(
name="kitsu.http",
version="0.0.1",
description="Low-level HTTP library",
long_description=read('README'),
author="Alexey Borzenkov",
author_email="snaury@gmail.com",
url="http://git.kitsu.ru/mine/kitsu-http.git",
license="MIT License",
platforms=['any'],
packages=['kitsu', 'kitsu.http'],
zip_safe=True,
test_suite='tests.test_suite',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
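As an aside on the kitsu.http record above, the subpackage list can also be discovered automatically instead of being maintained by hand. A minimal sketch, assuming both kitsu/ and kitsu/http/ are regular packages with __init__.py (other metadata trimmed):
from setuptools import setup, find_packages
setup(
    name="kitsu.http",
    version="0.0.1",
    # picks up 'kitsu' and 'kitsu.http' without listing them explicitly
    packages=find_packages(exclude=("tests*",)),
    zip_safe=True,
)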
d0b930e6d7ce3bff833bd177bc13a908cb1bed0d
|
setup.py
|
setup.py
|
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailvoas@gmail.com',
url='https://github.com/trimailov/timeflow',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
|
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailovas@gmail.com',
url='https://github.com/trimailov/timeflow',
license='MIT',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
|
Add license type and fix typo
|
Add license type and fix typo
|
Python
|
mit
|
trimailov/timeflow
|
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailvoas@gmail.com',
url='https://github.com/trimailov/timeflow',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
Add license type and fix typo
|
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailovas@gmail.com',
url='https://github.com/trimailov/timeflow',
license='MIT',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
|
<commit_before>import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailvoas@gmail.com',
url='https://github.com/trimailov/timeflow',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
<commit_msg>Add license type and fix typo<commit_after>
|
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailovas@gmail.com',
url='https://github.com/trimailov/timeflow',
license='MIT',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
|
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailvoas@gmail.com',
url='https://github.com/trimailov/timeflow',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
Add license type and fix typoimport os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailovas@gmail.com',
url='https://github.com/trimailov/timeflow',
license='MIT',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
|
<commit_before>import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailvoas@gmail.com',
url='https://github.com/trimailov/timeflow',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
<commit_msg>Add license type and fix typo<commit_after>import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='timeflow',
packages=['timeflow'],
version='0.2',
description='Small CLI time logger',
author='Justas Trimailovas',
author_email='j.trimailovas@gmail.com',
url='https://github.com/trimailov/timeflow',
license='MIT',
keywords=['timelogger', 'logging', 'timetracker', 'tracker'],
long_description=read('README.rst'),
entry_points='''
[console_scripts]
timeflow=timeflow.main:main
tf=timeflow.main:main
''',
)
|
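A small aside on the timeflow record above: the entry_points value given as an INI-style string is also commonly written as a dict, which avoids whitespace sensitivity in the embedded block. A sketch of the equivalent form (other metadata omitted):
from setuptools import setup
setup(
    name="timeflow",
    packages=["timeflow"],
    entry_points={
        "console_scripts": [
            "timeflow = timeflow.main:main",
            "tf = timeflow.main:main",
        ],
    },
)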
397c8b952ad258d2419c428f0cf9961b65bc41d2
|
setup.py
|
setup.py
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Derek Brandao',
maintainer_email='dbrandao@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Python Spanner ORM developers',
maintainer_email='python-spanner-orm@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
|
Update maintainers to be a mailing list
|
Update maintainers to be a mailing list
|
Python
|
apache-2.0
|
google/python-spanner-orm
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Derek Brandao',
maintainer_email='dbrandao@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
Update maintainers to be a mailing list
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Python Spanner ORM developers',
maintainer_email='python-spanner-orm@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
|
<commit_before># python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Derek Brandao',
maintainer_email='dbrandao@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
<commit_msg>Update maintainers to be a mailing list<commit_after>
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Python Spanner ORM developers',
maintainer_email='python-spanner-orm@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Derek Brandao',
maintainer_email='dbrandao@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
Update maintainers to be a mailing list# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Python Spanner ORM developers',
maintainer_email='python-spanner-orm@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
|
<commit_before># python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Derek Brandao',
maintainer_email='dbrandao@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
<commit_msg>Update maintainers to be a mailing list<commit_after># python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""spanner_orm setup file."""
from setuptools import setup
setup(
name='spanner-orm',
version='0.1.10',
description='Basic ORM for Spanner',
maintainer='Python Spanner ORM developers',
maintainer_email='python-spanner-orm@google.com',
url='https://github.com/google/python-spanner-orm',
packages=['spanner_orm', 'spanner_orm.admin'],
include_package_data=True,
python_requires='~=3.7',
install_requires=['google-cloud-spanner >= 1.6, <2.0.0dev', 'frozendict'],
tests_require=['absl-py', 'google-api-core', 'portpicker'],
entry_points={
'console_scripts': ['spanner-orm = spanner_orm.admin.scripts:main']
})
|
05baf9fc587e0e4f3909cb130b16af5d6629face
|
setup.py
|
setup.py
|
from setuptools import setup
classifiers = ['Development Status :: 4 - Production/Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
|
from setuptools import setup
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license='BSDv3',
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
|
Fix the trover classifier and license
|
Fix the trover classifier and license
|
Python
|
bsd-3-clause
|
aweber/avroconsumer
|
from setuptools import setup
classifiers = ['Development Status :: 4 - Production/Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
Fix the trover classifier and license
|
from setuptools import setup
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license='BSDv3',
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
|
<commit_before>from setuptools import setup
classifiers = ['Development Status :: 4 - Production/Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
<commit_msg>Fix the trover classifier and license<commit_after>
|
from setuptools import setup
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license='BSDv3',
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
|
from setuptools import setup
classifiers = ['Development Status :: 4 - Production/Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
Fix the trover classifier and licensefrom setuptools import setup
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license='BSDv3',
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
|
<commit_before>from setuptools import setup
classifiers = ['Development Status :: 4 - Production/Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license=open('LICENSE').read(),
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
<commit_msg>Fix the trover classifier and license<commit_after>from setuptools import setup
classifiers = ['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(name='avroconsumer',
version='0.1.0',
description="Simplified PostgreSQL client built upon Psycopg2",
maintainer="Gavin M. Roy",
maintainer_email="gavinr@aweber.com",
url="https://github.com/aweber/avroconsumer",
install_requires=['rejected', 'avro'],
license='BSDv3',
package_data={'': ['LICENSE', 'README.rst']},
py_modules=['avroconsumer'],
classifiers=classifiers)
|
e546b82956455ba4c5510837c7527efdfc8dec47
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
package_data={'': ['MIT-LICENSE']},
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
Include license file in resulting tar.gz
|
Include license file in resulting tar.gz
I am going to package this module for Fedora. And it is better for Fedora if tar.gz file contains the file with the license (it is better for auditing). Can you please include it with next version?
|
Python
|
mit
|
Toilal/ordered-set,LuminosoInsight/ordered-set
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
Include license file in resulting tar.gz
I am going to package this module for Fedora. And it is better for Fedora if tar.gz file contains the file with the license (it is better for auditing). Can you please include it with next version?
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
package_data={'': ['MIT-LICENSE']},
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
<commit_before>from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
<commit_msg>Include license file in resulting tar.gz
I am going to package this module for Fedora. And it is better for Fedora if tar.gz file contains the file with the license (it is better for auditing). Can you please include it with next version?<commit_after>
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
package_data={'': ['MIT-LICENSE']},
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
Include license file in resulting tar.gz
I am going to package this module for Fedora. And it is better for Fedora if tar.gz file contains the file with the license (it is better for auditing). Can you please include it with next version?from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
package_data={'': ['MIT-LICENSE']},
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
<commit_before>from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
<commit_msg>Include license file in resulting tar.gz
I am going to package this module for Fedora, and it is better for Fedora if the tar.gz file contains the license file (it is better for auditing). Can you please include it with the next version?<commit_after>from setuptools import setup
setup(
name="ordered-set",
version = '1.3.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
package_data={'': ['MIT-LICENSE']},
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
]
)
|
9e17f00c9a3ffd83542db5053b7c5e23d5ff1e03
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1'
'django-sirtrevor>=0.2.3,<0.3',
],
)
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1',
'django-sirtrevor>=0.2.3,<0.3',
],
)
|
Allow polymorphic tree pre-release to be installed
|
Allow polymorphic tree pre-release to be installed
|
Python
|
bsd-2-clause
|
meshy/django-conman,meshy/django-conman,Ian-Foote/django-conman
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1'
'django-sirtrevor>=0.2.3,<0.3',
],
)
Allow polymorphic tree pre-release to be installed
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1',
'django-sirtrevor>=0.2.3,<0.3',
],
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1'
'django-sirtrevor>=0.2.3,<0.3',
],
)
<commit_msg>Allow polymorphic tree pre-release to be installed<commit_after>
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1',
'django-sirtrevor>=0.2.3,<0.3',
],
)
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1'
'django-sirtrevor>=0.2.3,<0.3',
],
)
Allow polymorphic tree pre-release to be installedfrom setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1',
'django-sirtrevor>=0.2.3,<0.3',
],
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1'
'django-sirtrevor>=0.2.3,<0.3',
],
)
<commit_msg>Allow polymorphic tree pre-release to be installed<commit_after>from setuptools import setup, find_packages
version = '0.0.1'
setup(
name='django-conman',
packages=find_packages(),
include_package_data=True,
version=version,
description='A modular CMS for django',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-conman/',
install_requires=[
'django-mptt>=0.6.1,<=0.7',
'django-polymorphic-tree>=1.0b1',
'django-sirtrevor>=0.2.3,<0.3',
],
)
|
b80f775ef6307d625f64420c1852eb6119ae8cf7
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
try:
import pypandoc
readme = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
|
Modify README to autoconvert to rst if possible at upload
|
Modify README to autoconvert to rst if possible at upload
|
Python
|
mit
|
theherk/pinkopy
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
Modify README to autoconvert to rst if possible at upload
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
try:
import pypandoc
readme = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
<commit_msg>Modify README to autoconvert to rst if possible at upload<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
try:
import pypandoc
readme = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
Modify README to autoconvert to rst if possible at upload#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
try:
import pypandoc
readme = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
<commit_msg>Modify README to autoconvert to rst if possible at upload<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
import sys
try:
import pypandoc
readme = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
with open('README.md') as f:
readme = f.read()
install_requires = [
'cachetools>=1.1.5',
'requests>=2.7.0',
'xmltodict>=0.9.2',
]
setup(
name='pinkopy',
version='1.3.dev',
description='Python wrapper for Commvault api',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/pinkopy',
download_url='https://github.com/theherk/pinkopy/archive/1.3.dev.zip',
packages=find_packages(),
platforms=['all'],
license='MIT',
install_requires=install_requires,
classifiers=[
'Development Status :: 4 - Beta',
'License :: Other/Proprietary License',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
)
|
e39c93cdd987769a7efe7008b8bc3c80a2395084
|
setup.py
|
setup.py
|
from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['chardet']
)
|
from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['charade']
)
|
Use the right module name. ;)
|
Use the right module name. ;)
|
Python
|
lgpl-2.1
|
zougloub/charade,ddboline/chardet,chardet/chardet,ddboline/chardet,barak066/chardet,nvbn/charade,sigmavirus24/charade,memnonila/chardet,chardet/chardet,asdfsx/chardet,asdfsx/chardet,barak066/chardet,memnonila/chardet
|
from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['chardet']
)
Use the right module name. ;)
|
from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['charade']
)
|
<commit_before>from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['chardet']
)
<commit_msg>Use the right module name. ;)<commit_after>
|
from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['charade']
)
|
from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['chardet']
)
Use the right module name. ;)from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['charade']
)
|
<commit_before>from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['chardet']
)
<commit_msg>Use the right module name. ;)<commit_after>from setuptools import setup
# patch distutils if it can't cope with the "classifiers" or "download_url"
# keywords (prior to python 2.3.0).
from distutils.dist import DistributionMetadata
if not hasattr(DistributionMetadata, 'classifiers'):
DistributionMetadata.classifiers = None
if not hasattr(DistributionMetadata, 'download_url'):
DistributionMetadata.download_url = None
setup(
name='charade',
version='1.1',
description='Universal encoding detector',
long_description=open('README.rst').read(),
author='Mark Pilgrim',
author_email='mark@diveintomark.org',
url='https://github.com/sigmavirus24/charade',
license="LGPL",
platforms=['POSIX', 'Windows'],
keywords=['encoding', 'i18n', 'xml'],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public"
" License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
],
scripts=['bin/chardetect.py'],
packages=['charade']
)
|
53d66409b331f80db22ee14b6d1837593c7024bb
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['progressiveCactus = cactus.progressive.cactus_progressive:main']},)
|
from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['cactus = cactus.progressive.cactus_progressive:main']},)
|
Change entrypoint name from 'progressiveCactus' to 'cactus'
|
Change entrypoint name from 'progressiveCactus' to 'cactus'
|
Python
|
mit
|
benedictpaten/cactus,benedictpaten/cactus,benedictpaten/cactus,benedictpaten/cactus,benedictpaten/cactus
|
from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['progressiveCactus = cactus.progressive.cactus_progressive:main']},)
Change entrypoint name from 'progressiveCactus' to 'cactus'
|
from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['cactus = cactus.progressive.cactus_progressive:main']},)
|
<commit_before>from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['progressiveCactus = cactus.progressive.cactus_progressive:main']},)
<commit_msg>Change entrypoint name from 'progressiveCactus' to 'cactus'<commit_after>
|
from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['cactus = cactus.progressive.cactus_progressive:main']},)
|
from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['progressiveCactus = cactus.progressive.cactus_progressive:main']},)
Change entrypoint name from 'progressiveCactus' to 'cactus'from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['cactus = cactus.progressive.cactus_progressive:main']},)
|
<commit_before>from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['progressiveCactus = cactus.progressive.cactus_progressive:main']},)
<commit_msg>Change entrypoint name from 'progressiveCactus' to 'cactus'<commit_after>from setuptools import setup, find_packages
import os
import subprocess
os.system("pip install git+https://github.com/ComparativeGenomicsToolkit/sonLib@toil")
versionFile = "src/cactus/shared/version.py"
if os.path.exists(versionFile):
os.remove(versionFile)
git_commit = subprocess.check_output('git log --pretty=oneline -n 1 -- $(pwd)', shell=True).split()[0]
with open(versionFile, 'w') as versionFH:
versionFH.write("cactus_commit = '%s'" % git_commit)
setup(
name="progressiveCactus",
version="1.0",
author="Benedict Paten",
package_dir = {'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
package_data={'cactus': ['*_config.xml']},
# We use the __file__ attribute so this package isn't zip_safe.
zip_safe=False,
install_requires=[
'subprocess32',
'psutil',
'networkx'],
entry_points={
'console_scripts': ['cactus = cactus.progressive.cactus_progressive:main']},)
|
96924aea75dbbe82fec6c23df405a15e0bfeeac0
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
Exclude tests folder from dist
|
Exclude tests folder from dist
|
Python
|
bsd-2-clause
|
incuna/django-pgcrypto-fields,atdsaa/django-pgcrypto-fields
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
Exclude tests folder from dist
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
<commit_before>from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
<commit_msg>Exclude tests folder from dist<commit_after>
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
Exclude tests folder from distfrom setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
<commit_before>from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
<commit_msg>Exclude tests folder from dist<commit_after>from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
71fd1b82f4bc9f009a80a0495fafc82c15aa58b3
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
],
)
|
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
'nose',
],
)
|
Add nose to list of dependencies.
|
Add nose to list of dependencies.
|
Python
|
mit
|
lkluft/lehrex
|
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
],
)
Add nose to list of dependencies.
|
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
'nose',
],
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
],
)
<commit_msg>Add nose to list of dependencies.<commit_after>
|
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
'nose',
],
)
|
# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
],
)
Add nose to list of dependencies.# -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
'nose',
],
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
],
)
<commit_msg>Add nose to list of dependencies.<commit_after># -*- coding: utf-8 -*-
import sys
from distutils.core import setup
from setuptools import find_packages
from lehrex import __version__
if not sys.version_info >= (3, 5, 1):
sys.exit('Only support Python version >=3.5.1.\n'
'Found version is {}'.format(sys.version))
setup(
name='lehrex',
author='Lukas Kluft',
author_email='lukas.kluft@gmail.com',
url='https://github.com/lkluft/lehrex',
download_url='https://github.com/lkluft/lehrex/tarball/' + __version__,
version=__version__,
packages=find_packages(),
license='MIT',
description='Support the research during the Lehrexkursion.',
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
include_package_data=True,
install_requires=[
'matplotlib>=1.5.1',
'numpy>=1.10.4',
'nose',
],
)
|
f132f14d60a60bb2af89ba1c1d4b0c31cff68b6f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.2",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.3",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
|
Change version 1.1.2 to 1.1.3
|
Change version 1.1.2 to 1.1.3
|
Python
|
mit
|
iktakahiro/slackpy,DeviaVir/slackpy
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.2",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
Change version 1.1.2 to 1.1.3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.3",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.2",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
<commit_msg>Change version 1.1.2 to 1.1.3<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.3",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.2",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
Change version 1.1.2 to 1.1.3
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.3",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.2",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
<commit_msg>Change version 1.1.2 to 1.1.3<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
__author__ = 'Takahiro Ikeuchi'
setup(
name="slackpy",
version="1.1.3",
py_modules=['slackpy'],
package_dir={'': 'slackpy'},
install_requires=open('requirements.txt').read().splitlines(),
description="Simple Slack client library",
long_description=open('README.txt').read(),
author='Takahiro Ikeuchi',
author_email='takahiro.ikeuchi@gmail.com',
url='https://github.com/iktakahiro/slackpy',
keywords=["Slack", "Slack Client"],
license='MIT',
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Logging",
"Topic :: Communications :: Chat"
],
entry_points={
"console_scripts": [
"slackpy=slackpy:main",
],
},
)
|
e83ea97a36bfa308359e3377dfd4a14aaf045be4
|
shell.py
|
shell.py
|
import sys, os, subprocess
def run_shell_command(cmdline, pipe_output=True, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
|
import sys, os, subprocess
def make_environment(env=None):
if env is None:
env = os.environ
env = env.copy()
env["PYTHONUNBUFFERED"] = "1"
env["PYTHONIOENCODING"] = "UTF-8"
return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
env = make_environment(env),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
|
Disable buffering in Python subprocesses so that output appears immediately, and make sure the output encoding is UTF-8.
|
Disable buffering in Python subprocesses so that output appears immediately, and make sure the output encoding is UTF-8.
|
Python
|
mit
|
shaurz/devo
|
import sys, os, subprocess
def run_shell_command(cmdline, pipe_output=True, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
Disable buffering in Python subprocesses so that output appears immediately, and make sure the output encoding is UTF-8.
|
import sys, os, subprocess
def make_environment(env=None):
if env is None:
env = os.environ
env = env.copy()
env["PYTHONUNBUFFERED"] = "1"
env["PYTHONIOENCODING"] = "UTF-8"
return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
env = make_environment(env),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
|
<commit_before>import sys, os, subprocess
def run_shell_command(cmdline, pipe_output=True, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
<commit_msg>Disable buffering in Python subprocesses so that output appears immediately, and make sure the output encoding is UTF-8.<commit_after>
|
import sys, os, subprocess
def make_environment(env=None):
if env is None:
env = os.environ
env = env.copy()
env["PYTHONUNBUFFERED"] = "1"
env["PYTHONIOENCODING"] = "UTF-8"
return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
env = make_environment(env),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
|
import sys, os, subprocess
def run_shell_command(cmdline, pipe_output=True, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
Disable buffering in Python subprocesses so that output appears immediately, and make sure the output encoding is UTF-8.
import sys, os, subprocess
def make_environment(env=None):
if env is None:
env = os.environ
env = env.copy()
env["PYTHONUNBUFFERED"] = "1"
env["PYTHONIOENCODING"] = "UTF-8"
return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
env = make_environment(env),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
|
<commit_before>import sys, os, subprocess
def run_shell_command(cmdline, pipe_output=True, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
<commit_msg>Disable buffering in Python subprocesses so that output appears immediately, and make sure the output encoding is UTF-8.<commit_after>import sys, os, subprocess
def make_environment(env=None):
if env is None:
env = os.environ
env = env.copy()
env["PYTHONUNBUFFERED"] = "1"
env["PYTHONIOENCODING"] = "UTF-8"
return env
def run_shell_command(cmdline, pipe_output=True, env=None, **kwargs):
if sys.platform == "win32":
args = cmdline
else:
args = [os.environ.get("SHELL", "/bin/sh")]
process = subprocess.Popen(args,
stdin = subprocess.PIPE if sys.platform != "win32" else None,
stdout = subprocess.PIPE if pipe_output else None,
stderr = subprocess.STDOUT if pipe_output else None,
bufsize = 1,
close_fds = (sys.platform != "win32"),
shell = (sys.platform == "win32"),
env = make_environment(env),
**kwargs)
if sys.platform != "win32":
process.stdin.write(cmdline)
process.stdin.close()
return process
def kill_shell_process(process, force=False):
if sys.platform != "win32":
signal = "-KILL" if force else "-TERM"
rc = subprocess.call(["pkill", signal, "-P", str(process.pid)])
if rc == 0:
return
if force:
process.kill()
else:
process.terminate()
|
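A minimal usage sketch for the shell helpers in the record above (illustrative only, not part of the devo sources): the script name is hypothetical, and pipe_output is left at its default so stdout is captured.
import sys

# Assumes run_shell_command/kill_shell_process from the record above are importable.
# The environment injected by make_environment() keeps any Python child process
# unbuffered and forces UTF-8 output.
process = run_shell_command("python build_report.py")
first_line = process.stdout.readline()  # arrives promptly because the child does not buffer
sys.stdout.write(first_line)
kill_shell_process(process)  # stop the command early; pkill -TERM on POSIX, terminate() elsewhere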
df32343a60aaf39802953fdfb0270c9e0f5fa477
|
reports/views.py
|
reports/views.py
|
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request else None
report_form = ReportForm(data)
if report_form.is_valid():
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
|
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404, redirect
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request.POST else None
report_form = ReportForm(data)
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
if report_form.is_valid():
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return redirect('/members/profile/')
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
|
Fix the request.POST, usage of formset and redirect at the end
|
Fix the request.POST, usage of formset and redirect at the end
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request else None
report_form = ReportForm(data)
if report_form.is_valid():
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
Fix the request.POST, usage of formset and redirect at the end
|
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404, redirect
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request.POST else None
report_form = ReportForm(data)
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
if report_form.is_valid():
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return redirect('/members/profile/')
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
|
<commit_before>from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request else None
report_form = ReportForm(data)
if report_form.is_valid():
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
<commit_msg>Fix the request.POST, usage of formset and redirect at the end<commit_after>
|
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404, redirect
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request.POST else None
report_form = ReportForm(data)
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
if report_form.is_valid():
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return redirect('/members/profile/')
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
|
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request else None
report_form = ReportForm(data)
if report_form.is_valid():
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
Fix the request.POST, usage of formset and redirect at the end
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404, redirect
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request.POST else None
report_form = ReportForm(data)
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
if report_form.is_valid():
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return redirect('/members/profile/')
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
|
<commit_before>from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request else None
report_form = ReportForm(data)
if report_form.is_valid():
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
<commit_msg>Fix the request.POST, usage of formset and redirect at the end<commit_after>from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator
from django.shortcuts import render, get_object_or_404, redirect
from .forms import ReportForm, CopyFormSet
from .models import Report
@permission_required('reports.add_report', login_url='members:login')
def add_report(request):
data = request.POST if request.POST else None
report_form = ReportForm(data)
formset = CopyFormSet(data, instance=request.session.get('report_in_creation', Report()))
if report_form.is_valid():
report = report_form.save()
request.session['report_in_creation'] = formset.instance = report
if formset.is_valid():
formset.save()
del request.session['report_in_creation']
return redirect('/members/profile/')
return render(request, 'reports/add.html', locals())
def listing(request, page):
reports_list = Report.objects.all()
paginator = Paginator(reports_list, 30)
reports = paginator.page(page)
return render(request, 'reports/listing.html', {"reports": reports})
def show(request, id):
report = get_object_or_404(Report, id=id)
return render(request, 'reports/show.html', locals())
|
e85e9afec6afb038b3188038d6c83341d08c67da
|
src/service_api/python/cloudi_service_api.py
|
src/service_api/python/cloudi_service_api.py
|
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
os.path.sep.join(_FILE_DIRECTORY[:-2] + ['api', 'python']),
])
import jsonrpclib
import erlang
class _ServiceDescription(object):
# pylint: disable=too-few-public-methods
def __init__(self, *args):
self.__args = args
def __str__(self):
return str(self.__args)
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
|
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
])
import jsonrpclib
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
|
Fix Python CloudI Service API interface using JSON-RPC.
|
Fix Python CloudI Service API interface using JSON-RPC.
|
Python
|
mit
|
CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI,CloudI/CloudI
|
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
os.path.sep.join(_FILE_DIRECTORY[:-2] + ['api', 'python']),
])
import jsonrpclib
import erlang
class _ServiceDescription(object):
# pylint: disable=too-few-public-methods
def __init__(self, *args):
self.__args = args
def __str__(self):
return str(self.__args)
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
Fix Python CloudI Service API interface using JSON-RPC.
|
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
])
import jsonrpclib
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
|
<commit_before>#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
os.path.sep.join(_FILE_DIRECTORY[:-2] + ['api', 'python']),
])
import jsonrpclib
import erlang
class _ServiceDescription(object):
# pylint: disable=too-few-public-methods
def __init__(self, *args):
self.__args = args
def __str__(self):
return str(self.__args)
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
<commit_msg>Fix Python CloudI Service API interface using JSON-RPC.<commit_after>
|
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
])
import jsonrpclib
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
|
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
os.path.sep.join(_FILE_DIRECTORY[:-2] + ['api', 'python']),
])
import jsonrpclib
import erlang
class _ServiceDescription(object):
# pylint: disable=too-few-public-methods
def __init__(self, *args):
self.__args = args
def __str__(self):
return str(self.__args)
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
Fix Python CloudI Service API interface using JSON-RPC.
#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
])
import jsonrpclib
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
|
<commit_before>#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
os.path.sep.join(_FILE_DIRECTORY[:-2] + ['api', 'python']),
])
import jsonrpclib
import erlang
class _ServiceDescription(object):
# pylint: disable=too-few-public-methods
def __init__(self, *args):
self.__args = args
def __str__(self):
return str(self.__args)
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
<commit_msg>Fix Python CloudI Service API interface using JSON-RPC.<commit_after>#-*-Mode:python;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
# ex: set ft=python fenc=utf-8 sts=4 ts=4 sw=4 et nomod:
"""
CloudI Service API <https://cloudi.org/api.html#2_Intro>.
"""
# pylint: disable=wrong-import-position
import sys
import os
_FILE_DIRECTORY = os.path.dirname(os.path.abspath(__file__)).split(os.path.sep)
sys.path.extend([
os.path.sep.join(_FILE_DIRECTORY + ['jsonrpclib']),
])
import jsonrpclib
class CloudI(object):
"""
CloudI Service API object (communicating with JSON-RPC)
"""
# pylint: disable=too-few-public-methods
# initialize with configuration file defaults
def __init__(self, host='localhost', port=6464):
address = 'http://%s:%d/cloudi/api/rpc.json' % (host, port)
self.__server = jsonrpclib.Server(address)
def __getattr__(self, name):
return self.__server.__getattr__(name)
|
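For context only, a caller of the JSON-RPC wrapper above might look like the sketch below; 'nodes' stands in for whichever CloudI Service API method the caller wants, since attribute access is proxied verbatim to the server.
# Sketch assuming a CloudI node is listening on localhost:6464.
api = CloudI(host='localhost', port=6464)
# Attribute access is forwarded to jsonrpclib.Server, so this issues a JSON-RPC
# request named 'nodes' against /cloudi/api/rpc.json.
running_nodes = api.nodes()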
94d47cfc6db684beda275f8658660a3bd92b319d
|
src/syft/grid/client/request_api/user_api.py
|
src/syft/grid/client/request_api/user_api.py
|
# stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key):
return self.get(user_id=key)
def __delitem__(self, key):
self.delete(user_id=key)
|
# stdlib
from typing import Any
from typing import Callable
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send: Callable):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key: int) -> Any:
return self.get(user_id=key)
def __delitem__(self, key: int) -> None:
self.delete(user_id=key)
|
Update User API - ADD type hints - Remove unused imports
|
Update User API
- ADD type hints
- Remove unused imports
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
# stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key):
return self.get(user_id=key)
def __delitem__(self, key):
self.delete(user_id=key)
Update User API
- ADD type hints
- Remove unused imports
|
# stdlib
from typing import Any
from typing import Callable
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send: Callable):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key: int) -> Any:
return self.get(user_id=key)
def __delitem__(self, key: int) -> None:
self.delete(user_id=key)
|
<commit_before># stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key):
return self.get(user_id=key)
def __delitem__(self, key):
self.delete(user_id=key)
<commit_msg>Update User API
- ADD type hints
- Remove unused imports<commit_after>
|
# stdlib
from typing import Any
from typing import Callable
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send: Callable):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key: int) -> Any:
return self.get(user_id=key)
def __delitem__(self, key: int) -> None:
self.delete(user_id=key)
|
# stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key):
return self.get(user_id=key)
def __delitem__(self, key):
self.delete(user_id=key)
Update User API
- ADD type hints
- Remove unused imports
# stdlib
from typing import Any
from typing import Callable
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send: Callable):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key: int) -> Any:
return self.get(user_id=key)
def __delitem__(self, key: int) -> None:
self.delete(user_id=key)
|
<commit_before># stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key):
return self.get(user_id=key)
def __delitem__(self, key):
self.delete(user_id=key)
<commit_msg>Update User API
- ADD type hints
- Remove unused imports<commit_after># stdlib
from typing import Any
from typing import Callable
# syft relative
from ...messages.user_messages import CreateUserMessage
from ...messages.user_messages import DeleteUserMessage
from ...messages.user_messages import GetUserMessage
from ...messages.user_messages import GetUsersMessage
from ...messages.user_messages import UpdateUserMessage
from .request_api import GridRequestAPI
class UserRequestAPI(GridRequestAPI):
response_key = "user"
def __init__(self, send: Callable):
super().__init__(
create_msg=CreateUserMessage,
get_msg=GetUserMessage,
get_all_msg=GetUsersMessage,
update_msg=UpdateUserMessage,
delete_msg=DeleteUserMessage,
send=send,
response_key=UserRequestAPI.response_key,
)
def __getitem__(self, key: int) -> Any:
return self.get(user_id=key)
def __delitem__(self, key: int) -> None:
self.delete(user_id=key)
|
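Purely as an illustration (not taken from the PySyft sources), the mapping-style accessors added above could be exercised as follows; the send callable is a placeholder for the client's real transport.
# Sketch only: 'send' is a stand-in, not PySyft's actual client plumbing.
def send(msg):
    raise NotImplementedError("deliver the message over the real transport here")

user_api = UserRequestAPI(send=send)
user = user_api[42]   # __getitem__ -> self.get(user_id=42), i.e. a GetUserMessage
del user_api[42]      # __delitem__ -> self.delete(user_id=42), i.e. a DeleteUserMessage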
2a12a7d2e2d06e64ca076563b8b68454e92fefae
|
service_fabfile.py
|
service_fabfile.py
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
deploy_static(static_dir='../assets/', cmd='python manage.py')
|
Deploy static files during installation
|
Deploy static files during installation
|
Python
|
bsd-3-clause
|
CorbanU/corban-shopify,CorbanU/corban-shopify
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
Deploy static files during installation
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
deploy_static(static_dir='../assets/', cmd='python manage.py')
|
<commit_before>from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
<commit_msg>Deploy static files during installation<commit_after>
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
deploy_static(static_dir='../assets/', cmd='python manage.py')
|
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
Deploy static files during installation
from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
deploy_static(static_dir='../assets/', cmd='python manage.py')
|
<commit_before>from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
<commit_msg>Deploy static files during installation<commit_after>from fabric.api import *
from fabfile import install_requirements
from fabfile import migrate_db
def build(service=None):
"""Perform pre-installation tasks for the service."""
pass
def install(service=None):
"""Perform service specific post-installation tasks."""
install_requirements()
migrate_db(cmd='python manage.py')
deploy_static(static_dir='../assets/', cmd='python manage.py')
|
10e307a0dda94a9b38a1b7e143ef141e6062566b
|
skan/pipe.py
|
skan/pipe.py
|
from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = (None if self.image_format.get() == 'auto'
else self.image_format.get())
results = []
from skan import pre, csr
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if self.scale_metadata_path is not None:
md_path = self.scale_metadata_path.get().split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(self.threshold_radius.get() /
scale))
pixel_smoothing_radius = (self.smooth_radius.get() *
pixel_threshold_radius)
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=self.brightness_offset.get())
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
results = pd.concat(results)
|
from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
|
Add module for start-to-finish functions
|
Add module for start-to-finish functions
|
Python
|
bsd-3-clause
|
jni/skan
|
from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = (None if self.image_format.get() == 'auto'
else self.image_format.get())
results = []
from skan import pre, csr
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if self.scale_metadata_path is not None:
md_path = self.scale_metadata_path.get().split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(self.threshold_radius.get() /
scale))
pixel_smoothing_radius = (self.smooth_radius.get() *
pixel_threshold_radius)
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=self.brightness_offset.get())
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
results = pd.concat(results)
Add module for start-to-finish functions
|
from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
|
<commit_before>from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = (None if self.image_format.get() == 'auto'
else self.image_format.get())
results = []
from skan import pre, csr
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if self.scale_metadata_path is not None:
md_path = self.scale_metadata_path.get().split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(self.threshold_radius.get() /
scale))
pixel_smoothing_radius = (self.smooth_radius.get() *
pixel_threshold_radius)
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=self.brightness_offset.get())
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
results = pd.concat(results)
<commit_msg>Add module for start-to-finish functions<commit_after>
|
from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
|
from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = (None if self.image_format.get() == 'auto'
else self.image_format.get())
results = []
from skan import pre, csr
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if self.scale_metadata_path is not None:
md_path = self.scale_metadata_path.get().split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(self.threshold_radius.get() /
scale))
pixel_smoothing_radius = (self.smooth_radius.get() *
pixel_threshold_radius)
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=self.brightness_offset.get())
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
results = pd.concat(results)
Add module for start-to-finish functionsfrom . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
|
<commit_before>from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = (None if self.image_format.get() == 'auto'
else self.image_format.get())
results = []
from skan import pre, csr
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if self.scale_metadata_path is not None:
md_path = self.scale_metadata_path.get().split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(self.threshold_radius.get() /
scale))
pixel_smoothing_radius = (self.smooth_radius.get() *
pixel_threshold_radius)
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=self.brightness_offset.get())
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
results = pd.concat(results)
<commit_msg>Add module for start-to-finish functions<commit_after>from . import pre, csr
import imageio
import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep=',')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
|
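A minimal usage sketch for the process_images pipeline in the record above. The import path (skan.pipe), the file pattern and every parameter value are assumptions made for illustration; the commit only shows the module body, not how it is imported or called.

# Sketch only: the import path and all values below are assumptions.
from glob import glob
from skan.pipe import process_images   # module name is an assumption

filenames = sorted(glob('images/*.tif'))          # placeholder file pattern
table = process_images(filenames,
                       image_format='auto',       # let imageio guess the format
                       threshold_radius=5e-8,     # physical units, e.g. metres
                       smooth_radius=0.1,         # fraction of threshold radius
                       brightness_offset=0.075,
                       scale_metadata_path=None)  # None -> work in pixel units
print(table[['filename', 'branch-distance', 'squiggle']].head())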
b74399679c739a70dd8e960cf63b4e9bd42bd65b
|
packager/core/test/test_check_dependencies.py
|
packager/core/test/test_check_dependencies.py
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
def test_child():
CheckDependencies("child")
def test_child():
CheckDependencies("sedflux")
|
#! /usr/bin/python
#from check_dependencies import CheckDependencies
#def test_default():
# CheckDependencies(None)
#def test_hydrotrend():
# CheckDependencies("hydrotrend")
#def test_cem():
# CheckDependencies("cem")
#def test_child():
# CheckDependencies("child")
#def test_child():
# CheckDependencies("sedflux")
|
Disable unit tests for packager.core.check_dependencies.py
|
Disable unit tests for packager.core.check_dependencies.py
|
Python
|
mit
|
csdms/packagebuilder
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
def test_child():
CheckDependencies("child")
def test_child():
CheckDependencies("sedflux")
Disable unit tests for packager.core.check_dependencies.py
|
#! /usr/bin/python
#from check_dependencies import CheckDependencies
#def test_default():
# CheckDependencies(None)
#def test_hydrotrend():
# CheckDependencies("hydrotrend")
#def test_cem():
# CheckDependencies("cem")
#def test_child():
# CheckDependencies("child")
#def test_child():
# CheckDependencies("sedflux")
|
<commit_before>#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
def test_child():
CheckDependencies("child")
def test_child():
CheckDependencies("sedflux")
<commit_msg>Disable unit tests for packager.core.check_dependencies.py<commit_after>
|
#! /usr/bin/python
#from check_dependencies import CheckDependencies
#def test_default():
# CheckDependencies(None)
#def test_hydrotrend():
# CheckDependencies("hydrotrend")
#def test_cem():
# CheckDependencies("cem")
#def test_child():
# CheckDependencies("child")
#def test_child():
# CheckDependencies("sedflux")
|
#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
def test_child():
CheckDependencies("child")
def test_child():
CheckDependencies("sedflux")
Disable unit tests for packager.core.check_dependencies.py#! /usr/bin/python
#from check_dependencies import CheckDependencies
#def test_default():
# CheckDependencies(None)
#def test_hydrotrend():
# CheckDependencies("hydrotrend")
#def test_cem():
# CheckDependencies("cem")
#def test_child():
# CheckDependencies("child")
#def test_child():
# CheckDependencies("sedflux")
|
<commit_before>#! /usr/bin/python
from check_dependencies import CheckDependencies
def test_default():
CheckDependencies(None)
def test_hydrotrend():
CheckDependencies("hydrotrend")
def test_cem():
CheckDependencies("cem")
def test_child():
CheckDependencies("child")
def test_child():
CheckDependencies("sedflux")
<commit_msg>Disable unit tests for packager.core.check_dependencies.py<commit_after>#! /usr/bin/python
#from check_dependencies import CheckDependencies
#def test_default():
# CheckDependencies(None)
#def test_hydrotrend():
# CheckDependencies("hydrotrend")
#def test_cem():
# CheckDependencies("cem")
#def test_child():
# CheckDependencies("child")
#def test_child():
# CheckDependencies("sedflux")
|
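The commit above disables the tests by commenting them out. For comparison, a hedged sketch of the more common pytest idiom for the same goal follows; it is not what the commit does, and the module name is simply reused from the record.

import pytest

# Skip the whole module gracefully if importing it is what breaks.
check_dependencies = pytest.importorskip("check_dependencies")

@pytest.mark.skip(reason="dependency checks temporarily disabled")
def test_hydrotrend():
    check_dependencies.CheckDependencies("hydrotrend")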
5d762fba65575b11ccbc15a23852d6b2d18b3f05
|
examples/qidle/qidle/utils.py
|
examples/qidle/qidle/utils.py
|
# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
executables.append(os.path.join(path, 'python.exe'))
return executables
|
# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
if 'scripts' in path.lower():
path = os.path.abspath(os.path.join(path, os.pardir))
executables.append(os.path.join(path, 'python.exe'))
return executables
|
Fix interpreter detection on windows
|
Fix interpreter detection on windows
|
Python
|
mit
|
mmolero/pyqode.python,zwadar/pyqode.python,pyQode/pyqode.python,pyQode/pyqode.python
|
# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
executables.append(os.path.join(path, 'python.exe'))
return executables
Fix interpreter detection on windows
|
# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
if 'scripts' in path.lower():
path = os.path.abspath(os.path.join(path, os.pardir))
executables.append(os.path.join(path, 'python.exe'))
return executables
|
<commit_before># -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
executables.append(os.path.join(path, 'python.exe'))
return executables
<commit_msg>Fix interpreter detection on windows<commit_after>
|
# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
if 'scripts' in path.lower():
path = os.path.abspath(os.path.join(path, os.pardir))
executables.append(os.path.join(path, 'python.exe'))
return executables
|
# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
executables.append(os.path.join(path, 'python.exe'))
return executables
Fix interpreter detection on windows# -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
if 'scripts' in path.lower():
path = os.path.abspath(os.path.join(path, os.pardir))
executables.append(os.path.join(path, 'python.exe'))
return executables
|
<commit_before># -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
executables.append(os.path.join(path, 'python.exe'))
return executables
<commit_msg>Fix interpreter detection on windows<commit_after># -*- coding: utf-8 -*-
from glob import glob
import os
import platform
def get_interpreters():
if platform.system().lower() == 'linux':
executables = [os.path.join('/usr/bin/', exe)
for exe in ['python2', 'python3']
if os.path.exists(os.path.join('/usr/bin/', exe))]
else:
executables = []
paths = os.environ['PATH'].split(';')
for path in paths:
if 'python' in path.lower():
if 'scripts' in path.lower():
path = os.path.abspath(os.path.join(path, os.pardir))
executables.append(os.path.join(path, 'python.exe'))
return executables
|
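A small sketch of the path adjustment the fix above relies on: on Windows, PATH often contains a ...\Scripts directory while python.exe lives one level up. ntpath is used here so the example behaves the same on any platform; the original code uses os.path, which maps to the same functions on Windows. The example path is made up.

import ntpath

path = r'C:\Python39\Scripts'                     # placeholder PATH entry
if 'scripts' in path.lower():
    # Step up from the Scripts directory to the interpreter's home.
    path = ntpath.normpath(ntpath.join(path, ntpath.pardir))
print(ntpath.join(path, 'python.exe'))            # C:\Python39\python.exe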
6abb42998633ebc3f530ebb8fc785255a6f360b3
|
auditlog/__manifest__.py
|
auditlog/__manifest__.py
|
# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
|
# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
}
|
Remove pre_init_hook reference from openerp, no pre_init hook exists any more
|
auditlog: Remove pre_init_hook reference from openerp, no pre_init hook exists any more
|
Python
|
agpl-3.0
|
thinkopensolutions/server-tools,ovnicraft/server-tools,ovnicraft/server-tools,thinkopensolutions/server-tools,ovnicraft/server-tools
|
# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
auditlog: Remove pre_init_hook reference from openerp, no pre_init hook exists any more
|
# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
<commit_msg>auditlog: Remove pre_init_hook reference from openerp, no pre_init hook exists any more<commit_after>
|
# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
}
|
# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
auditlog: Remove pre_init_hook reference from openerp, no pre_init hook exists any more# -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
<commit_msg>auditlog: Remove pre_init_hook reference from openerp, no pre_init hook exists any more<commit_after># -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
}
|
5b0b1af2f62051251cf2e71e8fc696b617f4f756
|
tests/modules/cloud_function_v2/test_plan.py
|
tests/modules/cloud_function_v2/test_plan.py
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions2_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
|
Fix tests referring to wrong IAM resource
|
Fix tests referring to wrong IAM resource
|
Python
|
apache-2.0
|
GoogleCloudPlatform/cloud-foundation-fabric,GoogleCloudPlatform/cloud-foundation-fabric,GoogleCloudPlatform/cloud-foundation-fabric,GoogleCloudPlatform/cloud-foundation-fabric
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
Fix tests referring to wrong IAM resource
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions2_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
|
<commit_before># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
<commit_msg>Fix tests referring to wrong IAM resource<commit_after>
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions2_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
Fix tests referring to wrong IAM resource# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions2_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
|
<commit_before># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
<commit_msg>Fix tests referring to wrong IAM resource<commit_after># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
@pytest.fixture
def resources(plan_runner):
_, resources = plan_runner()
return resources
def test_resource_count(resources):
"Test number of resources created."
assert len(resources) == 3
def test_iam(resources):
"Test IAM binding resources."
bindings = [r['values'] for r in resources if r['type']
== 'google_cloudfunctions2_function_iam_binding']
assert len(bindings) == 1
assert bindings[0]['role'] == 'roles/cloudfunctions.invoker'
|
969fcfa12bcb734720c3e48c508329b687f91bf6
|
Cogs/Message.py
|
Cogs/Message.py
|
import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
|
import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
if not requestor.dm_channel:
# No dm channel - create it
await requestor.create_dm()
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
|
Create dm channel if it doesn't exist
|
Create dm channel if it doesn't exist
|
Python
|
mit
|
corpnewt/CorpBot.py,corpnewt/CorpBot.py
|
import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
Create dm channel if it doesn't exist
|
import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
if not requestor.dm_channel:
# No dm channel - create it
await requestor.create_dm()
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
|
<commit_before>import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
<commit_msg>Create dm channel if it doesn't exist<commit_after>
|
import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
if not requestor.dm_channel:
# No dm channel - create it
await requestor.create_dm()
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
|
import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
Create dm channel if it doesn't existimport asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
if not requestor.dm_channel:
# No dm channel - create it
await requestor.create_dm()
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
|
<commit_before>import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
<commit_msg>Create dm channel if it doesn't exist<commit_after>import asyncio
import discord
import textwrap
from discord.ext import commands
async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000):
"""A helper function to get the bot to cut his text into chunks."""
if not bot or not msg or not target:
return False
textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False)
if not len(textList):
return False
if not requestor.dm_channel:
# No dm channel - create it
await requestor.create_dm()
dmChannel = requestor.dm_channel
if len(textList) > maxMessage and dmChannel.id != target.id :
# PM the contents to the requestor
await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList)))
target = requestor
for message in textList:
await target.send(message)
return True
|
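A short sketch of the guard introduced in the commit above: in discord.py's rewrite API, User.dm_channel stays None until create_dm() has been awaited once. The command below is illustrative and not part of the cog; depending on the installed discord.py version, Bot() may also require an intents argument.

import discord
from discord.ext import commands

bot = commands.Bot(command_prefix='$')

@bot.command()
async def dm_me(ctx):
    user = ctx.author
    if not user.dm_channel:
        # Lazily create the DM channel the first time it is needed.
        await user.create_dm()
    await user.dm_channel.send("Hello from a freshly ensured DM channel.")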
6c20f8a2c722fca1b2f811d4f06ea5480ec6d945
|
telethon/events/messagedeleted.py
|
telethon/events/messagedeleted.py
|
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
Set is private/group=True for messages deleted out of channels
|
Set is private/group=True for messages deleted out of channels
|
Python
|
mit
|
LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon
|
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
Set is private/group=True for messages deleted out of channels
|
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
<commit_before>from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
<commit_msg>Set is private/group=True for messages deleted out of channels<commit_after>
|
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
Set is private/group=True for messages deleted out of channelsfrom .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
<commit_before>from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
<commit_msg>Set is private/group=True for messages deleted out of channels<commit_after>from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types
@name_inner_event
class MessageDeleted(EventBuilder):
"""
Event fired when one or more messages are deleted.
"""
def build(self, update):
if isinstance(update, types.UpdateDeleteMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=None
)
elif isinstance(update, types.UpdateDeleteChannelMessages):
event = MessageDeleted.Event(
deleted_ids=update.messages,
peer=types.PeerChannel(update.channel_id)
)
else:
return
event._entities = update._entities
return self._filter_event(event)
class Event(EventCommon):
def __init__(self, deleted_ids, peer):
super().__init__(
chat_peer=peer, msg_id=(deleted_ids or [0])[0]
)
if peer is None:
# If it's not a channel ID, then it was private/small group.
# We can't know which one was exactly unless we logged all
# messages, but we can indicate that it was maybe either of
# both by setting them both to True.
self.is_private = self.is_group = True
self.deleted_id = None if not deleted_ids else deleted_ids[0]
self.deleted_ids = deleted_ids
|
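An illustrative Telethon handler for the event built above; the session name and API credentials are placeholders. Because Telegram omits the peer for deletions outside channels, the event cannot tell private chats from small groups apart, which is why the commit flags both is_private and is_group as True in that case.

from telethon import TelegramClient, events

client = TelegramClient('session_name', api_id=12345, api_hash='0123456789abcdef')

@client.on(events.MessageDeleted)
async def handler(event):
    # For non-channel deletions the location is ambiguous, as described above.
    where = 'channel' if event.is_channel else 'private chat or small group (ambiguous)'
    print('Deleted message ids:', event.deleted_ids, 'in', where)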
fd1c9a1c3b2212216a7e73c8aa9be3d1423eaff4
|
info.py
|
info.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
p = subprocess.call(['erl','-name','ctl@127.0.0.1',
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
Add uuid to remote shell node name.
|
Add uuid to remote shell node name.
With this change it's possible to attach several remote shells to the
same cluster. Previously there would be a name conflict.
Change-Id: Ic85f99c8a7c27a80b37ecad994c39557934c7f50
Reviewed-on: http://review.couchbase.org/12365
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Reviewed-by: Aliaksey Kandratsenka <340b8e09ca65cd3fc686427fcfed17e87eaf61e2@gmail.com>
|
Python
|
apache-2.0
|
couchbase/couchbase-cli,couchbaselabs/couchbase-cli,membase/membase-cli,couchbase/couchbase-cli,couchbaselabs/couchbase-cli,membase/membase-cli,couchbaselabs/couchbase-cli,membase/membase-cli
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
p = subprocess.call(['erl','-name','ctl@127.0.0.1',
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
Add uuid to remote shell node name.
With this change it's possible to attach several remote shells to the
same cluster. Previously there would be a name conflict.
Change-Id: Ic85f99c8a7c27a80b37ecad994c39557934c7f50
Reviewed-on: http://review.couchbase.org/12365
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Reviewed-by: Aliaksey Kandratsenka <340b8e09ca65cd3fc686427fcfed17e87eaf61e2@gmail.com>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
p = subprocess.call(['erl','-name','ctl@127.0.0.1',
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
<commit_msg>Add uuid to remote shell node name.
With this change it's possible to attach several remote shells to the
same cluster. Previously there would be a name conflict.
Change-Id: Ic85f99c8a7c27a80b37ecad994c39557934c7f50
Reviewed-on: http://review.couchbase.org/12365
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Reviewed-by: Aliaksey Kandratsenka <340b8e09ca65cd3fc686427fcfed17e87eaf61e2@gmail.com><commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
p = subprocess.call(['erl','-name','ctl@127.0.0.1',
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
Add uuid to remote shell node name.
With this change it's possible to attach several remote shells to the
same cluster. Previously there would be a name conflict.
Change-Id: Ic85f99c8a7c27a80b37ecad994c39557934c7f50
Reviewed-on: http://review.couchbase.org/12365
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Reviewed-by: Aliaksey Kandratsenka <340b8e09ca65cd3fc686427fcfed17e87eaf61e2@gmail.com>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
p = subprocess.call(['erl','-name','ctl@127.0.0.1',
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
<commit_msg>Add uuid to remote shell node name.
With this change it's possible to attach several remote shells to the
same cluster. Previously there would be a name conflict.
Change-Id: Ic85f99c8a7c27a80b37ecad994c39557934c7f50
Reviewed-on: http://review.couchbase.org/12365
Tested-by: Aliaksey Artamonau <3c875bcfb3adf2a65b2ae7686ca921e6c9433147@gmail.com>
Reviewed-by: Aliaksey Kandratsenka <340b8e09ca65cd3fc686427fcfed17e87eaf61e2@gmail.com><commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides info about a particular server.
"""
from usage import usage
import restclient
import simplejson
import subprocess
import sys
from uuid import uuid1
class Info:
def __init__(self):
self.debug = False
def runCmd(self, cmd, server, port,
user, password, opts):
for (o, a) in opts:
if o == '-d' or o == '--debug':
self.debug = True
rest = restclient.RestClient(server, port, {'debug':self.debug})
opts = {'error_msg': 'server-info error'}
data = rest.restCmd('GET', '/nodes/self',
user, password, opts)
json = rest.getJson(data)
for x in ['license', 'licenseValid', 'licenseValidUntil']:
if x in json:
del(json[x])
if cmd == 'server-eshell':
name = 'ctl-%s' % str(uuid1())
p = subprocess.call(['erl','-name',name,
'-setcookie',json['otpCookie'],'-hidden','-remsh',json['otpNode']])
else:
print simplejson.dumps(json, sort_keys=True, indent=2)
|
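The point of the change recorded above is simply that every remote shell gets its own Erlang node name. A minimal, self-contained sketch of that idea follows; the helper name is invented here and only the erl arguments from the diff are reused.

from uuid import uuid1

def build_eshell_args(otp_cookie, otp_node):
    # Every call yields a distinct -name, so several operators can attach
    # remote shells to the same cluster at once; the previous fixed name
    # 'ctl@127.0.0.1' allowed only one shell before a conflict occurred.
    name = 'ctl-%s' % str(uuid1())
    return ['erl', '-name', name,
            '-setcookie', otp_cookie, '-hidden', '-remsh', otp_node]

For example, build_eshell_args('cookie', 'ns_1@127.0.0.1') produces a fresh node name on every invocation, which is exactly what removes the clash described in the commit message.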
4c8c287abd0615610ec0571431e142f86a8c76e8
|
tests/testapp/models.py
|
tests/testapp/models.py
|
from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User)
room = models.ForeignKey(Room)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
|
from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
|
Add on_delete parameter to ForeignKey fields in testapp Models. on_delete is mandatory from Django 2.0 onwards.
|
Add on_delete parameter to ForeignKey fields in testapp Models. on_delete is mandatory from Django 2.0 onwards.
|
Python
|
bsd-3-clause
|
mattiaslinnap/django-partial-index
|
from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User)
room = models.ForeignKey(Room)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
Add on_delete parameter to ForeignKey fields in testapp Models. on_delete is mandatory from Django 2.0 onwards.
|
from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
|
<commit_before>from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User)
room = models.ForeignKey(Room)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
<commit_msg>Add on_delete parameter to ForeignKey fields in testapp Models. on_delete is mandatory from Django 2.0 onwards.<commit_after>
|
from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
|
from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User)
room = models.ForeignKey(Room)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
Add on_delete parameter to ForeignKey fields in testapp Models. on_delete is mandatory from Django 2.0 onwards.from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
|
<commit_before>from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User)
room = models.ForeignKey(Room)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
<commit_msg>Add on_delete parameter to ForeignKey fields in testapp Models. on_delete is mandatory from Django 2.0 onwards.<commit_after>from django.db import models
from partial_index import PartialIndex
class AB(models.Model):
a = models.CharField(max_length=50)
b = models.CharField(max_length=50)
class User(models.Model):
name = models.CharField(max_length=50)
class Room(models.Model):
name = models.CharField(max_length=50)
class RoomBooking(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
room = models.ForeignKey(Room, on_delete=models.CASCADE)
deleted_at = models.DateTimeField(null=True, blank=True)
class Meta:
indexes = [PartialIndex(fields=['user', 'room'], unique=True, where='deleted_at IS NULL')]
class Job(models.Model):
order = models.IntegerField()
group = models.IntegerField()
is_complete = models.BooleanField(default=False)
class Meta:
indexes = [
PartialIndex(fields=['-order'], unique=False, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
PartialIndex(fields=['group'], unique=True, where_postgresql='is_complete = false', where_sqlite='is_complete = 0'),
]
|
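Because the record above rests on on_delete becoming required, a short illustrative model pair (names invented, not taken from the repo, and meant to live inside any installed Django app) states the rule in isolation:

from django.db import models

class Author(models.Model):
    name = models.CharField(max_length=50)

class Post(models.Model):
    # Django < 2.0 silently defaulted to CASCADE when on_delete was omitted;
    # from Django 2.0 onwards the argument is mandatory, and leaving it out
    # raises a TypeError as soon as the model module is imported.
    author = models.ForeignKey(Author, on_delete=models.CASCADE)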
30d643a6fed6d056f812db6c826e82e351d23c1d
|
litmus/cmds/__init__.py
|
litmus/cmds/__init__.py
|
#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
|
#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', shell=True, timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
|
Add shell=True to make sure that sdb does exist
|
Add shell=True to make sure that sdb does exist
|
Python
|
apache-2.0
|
dhs-shine/litmus
|
#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
Add shell=True to make sure that sdb does exist
|
#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', shell=True, timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
|
<commit_before>#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
<commit_msg>Add shell=True to make sure that sdb does exist<commit_after>
|
#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', shell=True, timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
|
#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
Add shell=True to make sure that sdb does exist#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', shell=True, timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
|
<commit_before>#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
<commit_msg>Add shell=True to make sure that sdb does exist<commit_after>#!/usr/bin/env python3
# Copyright 2015-2016 Samsung Electronics Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from configparser import RawConfigParser
from litmus.core.util import call
def load_project_list(projects):
"""docstring for load_project_list"""
configparser = RawConfigParser()
configparser.read(projects)
project_list = []
for section in configparser.sections():
item = dict(configparser.items(section))
item['name'] = section
project_list.append(item)
return project_list
def sdb_does_exist():
help_url = 'https://github.com/dhs-shine/litmus#prerequisite'
try:
call('sdb version', shell=True, timeout=10)
except FileNotFoundError:
raise Exception('Please install sdb. Refer to {}'.format(help_url))
return
|
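The behavioural difference behind the record above can be shown with the standard library alone; litmus.core.util.call is assumed to wrap something similar to subprocess.

import subprocess

def sdb_is_available():
    # With shell=True a missing binary does not raise FileNotFoundError in the
    # parent process: the shell starts, fails to find 'sdb', and signals that
    # through a non-zero return code (127 on most POSIX shells). A return code
    # of zero means sdb ran and answered 'sdb version'.
    return subprocess.call('sdb version', shell=True, timeout=10) == 0

sdb_is_available() therefore distinguishes an installed sdb from a missing one without depending on which exception the wrapper propagates.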
edb905aec44e3fb2086ae87df960597e7b4c8356
|
scoring/machinelearning/neuralnetwork.py
|
scoring/machinelearning/neuralnetwork.py
|
## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
|
## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
|
Add missing methods to NN class
|
Add missing methods to NN class
|
Python
|
bsd-3-clause
|
mwojcikowski/opendrugdiscovery
|
## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
Add missing methods to NN class
|
## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
|
<commit_before>## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
<commit_msg>Add missing methods to NN class<commit_after>
|
## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
|
## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
Add missing methods to NN class## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
|
<commit_before>## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors)).flatten()
<commit_msg>Add missing methods to NN class<commit_after>## FIX use ffnet for now, use sklearn in future
from ffnet import ffnet,mlgraph,tmlgraph
import numpy as np
from scipy.stats import linregress
class neuralnetwork:
def __init__(self, shape, loadnet=None, full_conn=True, biases=False):
"""
shape: shape of a NN given as a tuple
"""
if loadnet:
self.model = ffnet()
self.model.load(loadnet)
else:
if full_conn:
conec = tmlgraph(shape, biases)
else:
conec = mlgraph(shapebiases)
self.model = ffnet(conec)
def fit(self, input_descriptors, target_values, train_alg='tnc'):
getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000)
def predict(self, input_descriptors):
return np.array(self.model.call(input_descriptors))
def score(X, y):
return linregress(self.predict(X), y)[2]**2
|
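The score added in the record above is a plain r-squared between predictions and targets. A self-contained version (the function name is invented here; numpy and scipy are required) looks like this:

import numpy as np
from scipy.stats import linregress

def r_squared(predicted, observed):
    # linregress returns slope, intercept, rvalue, pvalue and stderr;
    # squaring rvalue gives the coefficient of determination used as the score.
    return linregress(np.asarray(predicted).ravel(),
                      np.asarray(observed).ravel())[2] ** 2

For instance, r_squared([1.0, 2.0, 3.0], [1.1, 1.9, 3.2]) evaluates to a value close to 1.0, while uncorrelated inputs score near 0.0.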
df18229b38a01d87076f3b13aee5bfd1f0f989c2
|
tunobase/blog/models.py
|
tunobase/blog/models.py
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
Update blog model with a more descriptive name
|
Update blog model with a more descriptive name
|
Python
|
bsd-3-clause
|
unomena/tunobase,unomena/tunobase
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
Update blog model with a more descriptive name
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
<commit_before>'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
<commit_msg>Update blog model with a more descriptive name<commit_after>
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
Update blog model with a more descriptive name'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
<commit_before>'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
<commit_msg>Update blog model with a more descriptive name<commit_after>'''
Blog App
This module determines how to display the Blog app in Django's admin
and lists other model functions.
'''
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from tunobase.core import models as core_models
class Blog(core_models.ContentModel):
'''
Blogs the Site has
'''
class Meta:
verbose_name = 'Blog Category'
verbose_name_plural = 'Blog Categories'
class BlogEntry(core_models.ContentModel):
'''
Entries per Blog
'''
blog = models.ForeignKey(Blog, related_name='entries')
author_users = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='blog_entries_authored',
null=True,
blank=True
)
authors_alternate = models.CharField(
max_length=512,
blank=True,
null=True
)
class Meta:
verbose_name_plural = 'Blog entries'
def get_absolute_url(self):
return reverse('blog_entry_detail', args=(self.slug,))
@property
def authors(self):
'''
Return a list of authors selected as users on the system and a list
of alternate authors as not users on the system if either exist
'''
authors_dict = {}
auth_users = self.author_users.all()
if auth_users:
authors_dict.update({
'users': auth_users
})
if self.authors_alternate:
authors_dict.update({
'alternate': self.authors_alternate.split(',')
})
return authors_dict
|
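For reference, the Meta options introduced in the record above only change the labels Django's admin displays. A throwaway model (name invented, to be placed in any installed app) makes the effect explicit:

from django.db import models

class Entry(models.Model):
    title = models.CharField(max_length=100)

    class Meta:
        verbose_name = 'Journal entry'            # singular label in the admin
        verbose_name_plural = 'Journal entries'   # replaces the derived 'entrys'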
16bdf4d3951c7f88b96bd922b5d4273cd93c4d98
|
test_asgi_redis.py
|
test_asgi_redis.py
|
from asgi_redis import RedisChannelLayer
from asgiref.conformance import make_tests
channel_layer = RedisChannelLayer(expiry=1)
RedisTests = make_tests(channel_layer, expiry_delay=1.1)
|
import unittest
from asgi_redis import RedisChannelLayer
from asgiref.conformance import ConformanceTestCase
# Default conformance tests
class RedisLayerTests(ConformanceTestCase):
channel_layer = RedisChannelLayer(expiry=1, group_expiry=2)
expiry_delay = 1.1
|
Update to match new asgiref test style
|
Update to match new asgiref test style
|
Python
|
bsd-3-clause
|
django/asgi_redis
|
from asgi_redis import RedisChannelLayer
from asgiref.conformance import make_tests
channel_layer = RedisChannelLayer(expiry=1)
RedisTests = make_tests(channel_layer, expiry_delay=1.1)
Update to match new asgiref test style
|
import unittest
from asgi_redis import RedisChannelLayer
from asgiref.conformance import ConformanceTestCase
# Default conformance tests
class RedisLayerTests(ConformanceTestCase):
channel_layer = RedisChannelLayer(expiry=1, group_expiry=2)
expiry_delay = 1.1
|
<commit_before>from asgi_redis import RedisChannelLayer
from asgiref.conformance import make_tests
channel_layer = RedisChannelLayer(expiry=1)
RedisTests = make_tests(channel_layer, expiry_delay=1.1)
<commit_msg>Update to match new asgiref test style<commit_after>
|
import unittest
from asgi_redis import RedisChannelLayer
from asgiref.conformance import ConformanceTestCase
# Default conformance tests
class RedisLayerTests(ConformanceTestCase):
channel_layer = RedisChannelLayer(expiry=1, group_expiry=2)
expiry_delay = 1.1
|
from asgi_redis import RedisChannelLayer
from asgiref.conformance import make_tests
channel_layer = RedisChannelLayer(expiry=1)
RedisTests = make_tests(channel_layer, expiry_delay=1.1)
Update to match new asgiref test styleimport unittest
from asgi_redis import RedisChannelLayer
from asgiref.conformance import ConformanceTestCase
# Default conformance tests
class RedisLayerTests(ConformanceTestCase):
channel_layer = RedisChannelLayer(expiry=1, group_expiry=2)
expiry_delay = 1.1
|
<commit_before>from asgi_redis import RedisChannelLayer
from asgiref.conformance import make_tests
channel_layer = RedisChannelLayer(expiry=1)
RedisTests = make_tests(channel_layer, expiry_delay=1.1)
<commit_msg>Update to match new asgiref test style<commit_after>import unittest
from asgi_redis import RedisChannelLayer
from asgiref.conformance import ConformanceTestCase
# Default conformance tests
class RedisLayerTests(ConformanceTestCase):
channel_layer = RedisChannelLayer(expiry=1, group_expiry=2)
expiry_delay = 1.1
|
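The shift recorded above, from a make_tests() factory to a TestCase subclass configured through class attributes, can be mimicked with nothing but unittest; the class names below are invented for illustration:

import unittest

class BaseConformance(unittest.TestCase):
    # Subclasses supply the fixture through class attributes instead of
    # passing arguments to a factory function that builds the test class.
    expiry_delay = None

    def test_expiry_configured(self):
        if self.expiry_delay is None:
            self.skipTest('abstract base class')
        self.assertGreater(self.expiry_delay, 0)

class RedisLikeLayerTests(BaseConformance):
    expiry_delay = 1.1

if __name__ == '__main__':
    unittest.main()

asgiref's real ConformanceTestCase follows the same pattern, as the record shows: the subclass only sets channel_layer and expiry_delay.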
59d1d9cf834ee8b0b41398d03381cd33562d7574
|
test_gitcontrib.py
|
test_gitcontrib.py
|
# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
gitcontrib.json_print(total, auth, expect)
out, err = capsys.readouterr()
j_data = json.loads(out)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
|
# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
j_data = gitcontrib.jsonify((total, auth, expect)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
|
Fix json_print => jsonify in tests
|
Fix json_print => jsonify in tests
|
Python
|
mit
|
nickfrostatx/gitcontrib
|
# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
gitcontrib.json_print(total, auth, expect)
out, err = capsys.readouterr()
j_data = json.loads(out)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
Fix json_print => jsonify in tests
|
# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
j_data = gitcontrib.jsonify(total, auth, expect)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
|
<commit_before># -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
gitcontrib.json_print(total, auth, expect)
out, err = capsys.readouterr()
j_data = json.loads(out)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
<commit_msg>Fix json_print => jsonify in tests<commit_after>
|
# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
j_data = gitcontrib.jsonify(total, auth, expect)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
|
# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
gitcontrib.json_print(total, auth, expect)
out, err = capsys.readouterr()
j_data = json.loads(out)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
Fix json_print => jsonify in tests# -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
j_data = gitcontrib.jsonify(total, auth, expect)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
|
<commit_before># -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
gitcontrib.json_print(total, auth, expect)
out, err = capsys.readouterr()
j_data = json.loads(out)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
<commit_msg>Fix json_print => jsonify in tests<commit_after># -*- coding: utf-8 -*-
"""Test them contribs."""
import gitcontrib
import json
import pytest
import subprocess
import sys
u_string = 'Usage:\ngitcontrib [--json] [-p, --path path] [extension(s) ...]\n'
@pytest.fixture
def git_repo(tmpdir):
subprocess.check_call(['git', 'init', str(tmpdir)])
return tmpdir
def test_usage(capsys):
gitcontrib.usage()
out, err = capsys.readouterr()
assert err == u_string
def test_git(git_repo):
# NOTE XXX TODO NO NO NO NO WRONG BAD NO XXX
assert b'nothing to commit' in gitcontrib.git(str(git_repo), 'status')
def test_badArg(capsys):
sys.argv = ['gitcontrib', '-a']
gitcontrib.main()
out, err = capsys.readouterr()
assert err == u_string
def test_json(capsys):
total = 20
auth = {'a': 12, 'b': 2, 'c': 1, 'd': 5}
expect = 0.25
j_data = gitcontrib.jsonify(total, auth, expect)
assert j_data['a']['met_expected']
assert j_data['b']['lines'] == 2
|
d99ef1ab1dc414294a200d4dafcb0d21c2d3f6d8
|
webapp/byceps/blueprints/board/formatting.py
|
webapp/byceps/blueprints/board/formatting.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser.format(value)
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
return PARSER.format(value)
|
Create and reuse a single BBcode parser instance.
|
Create and reuse a single BBcode parser instance.
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser.format(value)
Create and reuse a single BBcode parser instance.
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
return PARSER.format(value)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser.format(value)
<commit_msg>Create and reuse a single BBcode parser instance.<commit_after>
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
return PARSER.format(value)
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser.format(value)
Create and reuse a single BBcode parser instance.# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
return PARSER.format(value)
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser.format(value)
<commit_msg>Create and reuse a single BBcode parser instance.<commit_after># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.formatting
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
import bbcode
def create_parser():
"""Create a customized BBcode parser."""
parser = bbcode.Parser(replace_cosmetic=False)
# Replace image tags.
def render_image(name, value, options, parent, context):
return '<img src="{}"/>'.format(value)
parser.add_formatter('img', render_image, replace_links=False)
# Render quotes with optional author.
def render_quote(name, value, options, parent, context):
intro = ''
if 'author' in options:
author = options['author']
intro = '<p class="quote-intro"><cite>{}</cite> schrieb:</p>\n' \
.format(author)
return '{}<blockquote>{}</blockquote>'.format(intro, value)
parser.add_formatter('quote', render_quote, strip=True)
return parser
PARSER = create_parser()
def render_html(value):
"""Render text as HTML, interpreting BBcode."""
return PARSER.format(value)
|
6fec57fde4c67aeaf7622c6b1ee5d56fec2c5b57
|
image.py
|
image.py
|
"""Image."""
from PIL import Image
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
break
|
"""Image."""
from PIL import Image, ImageFilter
import numpy as np
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def edges(self):
"""Find edges of image."""
smooth = self.bmp.filter(ImageFilter.SMOOTH)
edges = smooth.filter(ImageFilter.FIND_EDGES)
self.bmp = edges
# WIP
@property
def size(self):
"""Return size of image in (x, y)."""
return self.bmp.size
@property
def matrix(self):
"""Return numpy matrix of image."""
size = self.size
matrix = []
for y in range(size[1]):
matrix.append([self.bmp.getpixel((x, y)) for x in range(size[0])])
return np.matrix(matrix)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
print image.matrix
break
|
Add matrix and edge detection
|
Add matrix and edge detection
|
Python
|
mit
|
anassinator/codejam,anassinator/codejam-2014
|
"""Image."""
from PIL import Image
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
break
Add matrix and edge detection
|
"""Image."""
from PIL import Image, ImageFilter
import numpy as np
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def edges(self):
"""Find edges of image."""
smooth = self.bmp.filter(ImageFilter.SMOOTH)
edges = smooth.filter(ImageFilter.FIND_EDGES)
self.bmp = edges
# WIP
@property
def size(self):
"""Return size of image in (x, y)."""
return self.bmp.size
@property
def matrix(self):
"""Return numpy matrix of image."""
size = self.size
matrix = []
for y in range(size[1]):
matrix.append([self.bmp.getpixel((x, y)) for x in range(size[0])])
return np.matrix(matrix)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
print image.matrix
break
|
<commit_before>"""Image."""
from PIL import Image
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
break
<commit_msg>Add matrix and edge detection<commit_after>
|
"""Image."""
from PIL import Image, ImageFilter
import numpy as np
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def edges(self):
"""Find edges of image."""
smooth = self.bmp.filter(ImageFilter.SMOOTH)
edges = smooth.filter(ImageFilter.FIND_EDGES)
self.bmp = edges
# WIP
@property
def size(self):
"""Return size of image in (x, y)."""
return self.bmp.size
@property
def matrix(self):
"""Return numpy matrix of image."""
size = self.size
matrix = []
for y in range(size[1]):
matrix.append([self.bmp.getpixel((x, y)) for x in range(size[0])])
return np.matrix(matrix)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
print image.matrix
break
|
"""Image."""
from PIL import Image
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
break
Add matrix and edge detection"""Image."""
from PIL import Image, ImageFilter
import numpy as np
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def edges(self):
"""Find edges of image."""
smooth = self.bmp.filter(ImageFilter.SMOOTH)
edges = smooth.filter(ImageFilter.FIND_EDGES)
self.bmp = edges
# WIP
@property
def size(self):
"""Return size of image in (x, y)."""
return self.bmp.size
@property
def matrix(self):
"""Return numpy matrix of image."""
size = self.size
matrix = []
for y in range(size[1]):
matrix.append([self.bmp.getpixel((x, y)) for x in range(size[0])])
return np.matrix(matrix)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
print image.matrix
break
|
<commit_before>"""Image."""
from PIL import Image
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
break
<commit_msg>Add matrix and edge detection<commit_after>"""Image."""
from PIL import Image, ImageFilter
import numpy as np
import os
class DatabaseImage(object):
"""Image from database."""
def __init__(self, path):
"""Construct DatabaseImage."""
self.path = path
self.bmp = Image.open(path)
filename = os.path.basename(path)
self.id = int(filename.split('_')[0])
self.sub_id = int(filename.split('_')[1])
def __str__(self):
"""Return string representation of DatabaseImage."""
return "{img.id:3} {img.sub_id}".format(img=self)
def edges(self):
"""Find edges of image."""
smooth = self.bmp.filter(ImageFilter.SMOOTH)
edges = smooth.filter(ImageFilter.FIND_EDGES)
self.bmp = edges
# WIP
@property
def size(self):
"""Return size of image in (x, y)."""
return self.bmp.size
@property
def matrix(self):
"""Return numpy matrix of image."""
size = self.size
matrix = []
for y in range(size[1]):
matrix.append([self.bmp.getpixel((x, y)) for x in range(size[0])])
return np.matrix(matrix)
def get_images():
"""Get images."""
for root, directories, filenames in os.walk('database'):
for filename in filenames:
if filename.endswith('.gif'):
path = os.path.join(root, filename)
yield DatabaseImage(path)
if __name__ == '__main__':
for image in get_images():
image.bmp.show()
print image.matrix
break
|
1326203c81db0973ff5e1472a2ad80499b6f2189
|
main.py
|
main.py
|
import csv
import logging
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
with open(Config.get('APP_OUTPUT_CSV'), 'w', newline='') as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
|
import csv
import logging
import os
import time
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
output_file_name = time.strftime('%d_%m_%Y') + '.csv'
output_path = os.path.join(Config.get('APP_OUTPUT_DIR'), output_file_name)
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
logger.info('Create directory, because not exist')
os.makedirs(Config.get('APP_OUTPUT_DIR'))
with open(output_path, 'w', newline='', encoding=Config.get('APP_OUTPUT_ENC')) as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
|
Add encoding support, move output to separate directory, change output filename to DD_MM_YYYY.csv
|
Add encoding support, move output to separate directory, change output filename to DD_MM_YYYY.csv
|
Python
|
mit
|
Holovin/D_GrabDemo
|
import csv
import logging
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
with open(Config.get('APP_OUTPUT_CSV'), 'w', newline='') as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
Add encoding support, move output to separate directory, change output filename to DD_MM_YYYY.csv
|
import csv
import logging
import os
import time
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
output_file_name = time.strftime('%d_%m_%Y') + '.csv'
output_path = os.path.join(Config.get('APP_OUTPUT_DIR'), output_file_name)
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
logger.info('Create directory, because not exist')
os.makedirs(Config.get('APP_OUTPUT_DIR'))
with open(output_path, 'w', newline='', encoding=Config.get('APP_OUTPUT_ENC')) as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
|
<commit_before>import csv
import logging
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
with open(Config.get('APP_OUTPUT_CSV'), 'w', newline='') as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
<commit_msg>Add encoding support, move output to separate directory, change output filename to DD_MM_YYYY.csv<commit_after>
|
import csv
import logging
import os
import time
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
output_file_name = time.strftime('%d_%m_%Y') + '.csv'
output_path = os.path.join(Config.get('APP_OUTPUT_DIR'), output_file_name)
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
logger.info('Create directory, because not exist')
os.makedirs(Config.get('APP_OUTPUT_DIR'))
with open(output_path, 'w', newline='', encoding=Config.get('APP_OUTPUT_ENC')) as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
|
import csv
import logging
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
with open(Config.get('APP_OUTPUT_CSV'), 'w', newline='') as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
Add encoding support, move output to separate directory, change output filename to DD_MM_YYYY.csvimport csv
import logging
import os
import time
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
output_file_name = time.strftime('%d_%m_%Y') + '.csv'
output_path = os.path.join(Config.get('APP_OUTPUT_DIR'), output_file_name)
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
logger.info('Create directory, because not exist')
os.makedirs(Config.get('APP_OUTPUT_DIR'))
with open(output_path, 'w', newline='', encoding=Config.get('APP_OUTPUT_ENC')) as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
|
<commit_before>import csv
import logging
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
with open(Config.get('APP_OUTPUT_CSV'), 'w', newline='') as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
<commit_msg>Add encoding support, move output to separate directory, change output filename to DD_MM_YYYY.csv<commit_after>import csv
import logging
import os
import time
from config.config import Config
from d_spider import DSpider
from dev.logger import logger_setup
def main():
# setup
logger_setup(Config.get('APP_LOG_FILE'), ['ddd_site_parse'])
# log
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
logger.info(' --- ')
logger.info('Start app...')
# bot
output_file_name = time.strftime('%d_%m_%Y') + '.csv'
output_path = os.path.join(Config.get('APP_OUTPUT_DIR'), output_file_name)
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
logger.info('Create directory, because not exist')
os.makedirs(Config.get('APP_OUTPUT_DIR'))
with open(output_path, 'w', newline='', encoding=Config.get('APP_OUTPUT_ENC')) as output:
writer = csv.writer(output, delimiter=';')
try:
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = DSpider(thread_number=threads_counter, logger_name='ddd_site_parse', writer=writer)
bot.run()
except Exception as e:
print(e)
logger.info('End app...\n\n')
if __name__ == '__main__':
main()
|
adcaa3bd5feb0939a6ffae8ce4637f5fd8369f2d
|
tests/base_test.py
|
tests/base_test.py
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Get's the one liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
Improve testing docstring output for inherited classes
|
Improve testing docstring output for inherited classes
|
Python
|
mit
|
ashleysommer/sanic-cors,corydolphin/flask-cors
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
Improve testing docstring output for inherited classes
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Get's the one liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
<commit_before># -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
<commit_msg>Improve testing docstring output for inherited classes<commit_after>
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Get's the one liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
Improve testing docstring output for inherited classes# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Get's the one liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
<commit_before># -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
<commit_msg>Improve testing docstring output for inherited classes<commit_after># -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class FlaskCorsTestCase(unittest.TestCase):
def shortDescription(self):
"""
Get's the one liner description to be displayed.
Source:
http://erikzaadi.com/2012/09/13/inheritance-within-python-unit-tests/
"""
doc = self._testMethodDoc
doc = doc and doc.split("\n")[0].strip() or ""
doc = "%s : %s" % (self.__class__.__name__, doc)
return doc
def iter_verbs(self, c):
''' A simple helper method to iterate through a range of
HTTP Verbs and return the test_client bound instance,
keeping writing our tests as DRY as possible.
'''
for verb in ['get', 'head', 'options']:
yield getattr(c, verb)
def iter_responses(self, path, verbs=['get', 'head', 'options'], **kwargs):
with self.app.test_client() as c:
for verb in verbs:
yield getattr(c, verb.lower())(path, **kwargs)
class AppConfigTest(object):
def setUp(self):
self.app = None
def tearDown(self):
self.app = None
|
2693b563a80e6906ace3f97b17e42012404b5cdc
|
modules/ecrans/tools.py
|
modules/ecrans/tools.py
|
"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def id2url(_id):
"return an url from an id"
regexp2 = re.compile("(\w+).([0-9]+).(.*$)")
match = regexp2.match(_id)
if match:
return 'http://www.20minutes.fr/%s/%s/%s' % (match.group(1),
match.group(2),
match.group(3))
else:
raise ValueError("id doesn't match")
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
|
"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
|
Remove useless function (20 minutes code in ecrans...)
|
Remove useless function (20 minutes code in ecrans...)
|
Python
|
agpl-3.0
|
nojhan/weboob-devel,nojhan/weboob-devel,yannrouillard/weboob,laurent-george/weboob,Konubinix/weboob,frankrousseau/weboob,frankrousseau/weboob,RouxRC/weboob,sputnick-dev/weboob,Boussadia/weboob,willprice/weboob,Konubinix/weboob,Boussadia/weboob,Boussadia/weboob,nojhan/weboob-devel,yannrouillard/weboob,frankrousseau/weboob,willprice/weboob,Boussadia/weboob,laurent-george/weboob,RouxRC/weboob,Konubinix/weboob,willprice/weboob,sputnick-dev/weboob,laurent-george/weboob,sputnick-dev/weboob,RouxRC/weboob,yannrouillard/weboob
|
"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def id2url(_id):
"return an url from an id"
regexp2 = re.compile("(\w+).([0-9]+).(.*$)")
match = regexp2.match(_id)
if match:
return 'http://www.20minutes.fr/%s/%s/%s' % (match.group(1),
match.group(2),
match.group(3))
else:
raise ValueError("id doesn't match")
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
Remove useless function (20 minutes code in ecrans...)
|
"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
|
<commit_before>"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def id2url(_id):
"return an url from an id"
regexp2 = re.compile("(\w+).([0-9]+).(.*$)")
match = regexp2.match(_id)
if match:
return 'http://www.20minutes.fr/%s/%s/%s' % (match.group(1),
match.group(2),
match.group(3))
else:
raise ValueError("id doesn't match")
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
<commit_msg>Remove useless function (20 minutes code in ecrans...)<commit_after>
|
"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
|
"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def id2url(_id):
"return an url from an id"
regexp2 = re.compile("(\w+).([0-9]+).(.*$)")
match = regexp2.match(_id)
if match:
return 'http://www.20minutes.fr/%s/%s/%s' % (match.group(1),
match.group(2),
match.group(3))
else:
raise ValueError("id doesn't match")
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
Remove useless function (20 minutes code in ecrans...)"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
|
<commit_before>"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def id2url(_id):
"return an url from an id"
regexp2 = re.compile("(\w+).([0-9]+).(.*$)")
match = regexp2.match(_id)
if match:
return 'http://www.20minutes.fr/%s/%s/%s' % (match.group(1),
match.group(2),
match.group(3))
else:
raise ValueError("id doesn't match")
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
<commit_msg>Remove useless function (20 minutes code in ecrans...)<commit_after>"tools for lefigaro backend"
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
def url2id(url):
"return an id from an url"
regexp = re.compile("(^.*),([0-9]+)\.html$")
match = regexp.match(url)
if match:
return match.group(2)
else:
raise ValueError("Can't find an id for the url")
def rssid(entry):
return url2id(entry.id)
|
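The url2id helper kept above pulls the trailing numeric id out of URLs shaped like "...,<id>.html", and rssid simply applies it to an RSS entry's id. A quick standalone check of that regex, using a made-up URL of the expected shape (not a real lefigaro address):
import re
regexp = re.compile(r"(^.*),([0-9]+)\.html$")
match = regexp.match("http://www.lefigaro.fr/flash-actu/article,123456.html")
print(match.group(2))  # prints: 123456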
9ec0c5dc170db0f6ffa05c09ea1d0f3e950b76a5
|
djstripe/management/commands/djstripe_sync_customers.py
|
djstripe/management/commands/djstripe_sync_customers.py
|
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
|
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import User
from ...sync import sync_customer
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
|
Make this work with Django 1.4
|
Make this work with Django 1.4
|
Python
|
mit
|
cjrh/dj-stripe,koobs/dj-stripe,tkwon/dj-stripe,aliev/dj-stripe,kavdev/dj-stripe,ctrengove/dj-stripe,StErMi/dj-stripe,kavdev/dj-stripe,mthornhill/dj-stripe,areski/dj-stripe,aliev/dj-stripe,maxmalynowsky/django-stripe-rest,areski/dj-stripe,jleclanche/dj-stripe,rawjam/dj-stripe,koobs/dj-stripe,doctorwidget/dj-stripe,rawjam/dj-stripe,jameshiew/dj-stripe,davidgillies/dj-stripe,jleclanche/dj-stripe,benmurden/dj-stripe,iddqd1/dj-stripe,photocrowd/dj-stripe,jameshiew/dj-stripe,andrewyoung1991/dj-stripe,LaunchlabAU/dj-stripe,davidgillies/dj-stripe,benmurden/dj-stripe,mwarkentin/dj-stripe,tkwon/dj-stripe,dj-stripe/dj-stripe,andrewyoung1991/dj-stripe,LaunchlabAU/dj-stripe,dj-stripe/dj-stripe,mwarkentin/dj-stripe,StErMi/dj-stripe,cjrh/dj-stripe,pydanny/dj-stripe,doctorwidget/dj-stripe,mthornhill/dj-stripe,ctrengove/dj-stripe,iddqd1/dj-stripe,photocrowd/dj-stripe,jpadilla/dj-stripe,jpadilla/dj-stripe,pydanny/dj-stripe
|
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
Make this work with Django 1.4
|
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import User
from ...sync import sync_customer
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
|
<commit_before>from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
<commit_msg>Make this work with Django 1.4<commit_after>
|
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import User
from ...sync import sync_customer
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
|
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
Make this work with Django 1.4from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import User
from ...sync import sync_customer
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
|
<commit_before>from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
<commit_msg>Make this work with Django 1.4<commit_after>from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import User
from ...sync import sync_customer
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print("[{0}/{1} {2}%] Syncing {3} [{4}]").format(
count, total, perc, user.username, user.pk
)
sync_customer(user)
|
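The change above pins the User import so the management command also runs on Django 1.4, where django.contrib.auth.get_user_model does not exist yet. A minimal compatibility sketch covering both situations (it assumes a configured Django project and is not dj-stripe's actual settings module):
try:
    from django.contrib.auth import get_user_model  # available from Django 1.5
    User = get_user_model()
except ImportError:
    from django.contrib.auth.models import User  # Django 1.4 fallback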
74e240d3e2e397eb8f3b0e63a1666412c3c1c66b
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from config import config
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
|
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
bootstrap = Bootstrap()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
|
Add flask-bootstrap to the mix
|
Add flask-bootstrap to the mix
|
Python
|
mit
|
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
|
from flask import Flask
from config import config
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
Add flask-bootstrap to the mix
|
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
bootstrap = Bootstrap()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
|
<commit_before>from flask import Flask
from config import config
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
<commit_msg>Add flask-bootstrap to the mix<commit_after>
|
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
bootstrap = Bootstrap()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
|
from flask import Flask
from config import config
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
Add flask-bootstrap to the mixfrom flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
bootstrap = Bootstrap()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
|
<commit_before>from flask import Flask
from config import config
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
<commit_msg>Add flask-bootstrap to the mix<commit_after>from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
bootstrap = Bootstrap()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
|
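The factory above instantiates Bootstrap at import time and only binds it to the app inside create_app. A hypothetical launcher for it could look like this ('default' is a placeholder config name; the real keys live in this project's config module):
from app import create_app
app = create_app('default')  # placeholder config name
if __name__ == '__main__':
    app.run(debug=True)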
e1bc92abaf23002c37b9a8b7e5bf12b175be1a40
|
tools/translate.py
|
tools/translate.py
|
#!/usr/bin/python
import re
import os
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
path = '../web/l10n/'
files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')]
for f in files:
f = path + f
print 'en -> ' + f[-5:-3]
dict = {}
for line in open(f).read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
dict[match.group(1)] = match.group(2)
out = open(f, 'w')
for line in open(path + 'en.js').read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
if dict.has_key(match.group(1)):
value = dict[match.group(1)]
else:
print '"' + match.group(2) + '"'
value = match.group(2) + ' (*)'
out.write(' ' + match.group(1) + ": '" + value + "'")
if match.group(3) is not None:
out.write(',')
out.write('\n')
else:
out.write(line + '\n')
|
#!/usr/bin/python
import os
import optparse
import urllib2
import json
import base64
parser = optparse.OptionParser()
parser.add_option("-u", "--user", dest="username", help="transifex user login")
parser.add_option("-p", "--password", dest="password", help="transifex user password")
(options, args) = parser.parse_args()
if not options.username or not options.password:
parser.error('User name and password are required')
os.chdir(os.path.dirname(os.path.abspath(__file__)))
path = "../web/l10n/"
def request(url):
req = urllib2.Request(url)
auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "")
req.add_header("Authorization", "Basic %s" % auth)
return urllib2.urlopen(req)
resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details"))
for language in resource["available_languages"]:
code = language["code"]
data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file")
file = open(path + code + ".json", "wb")
file.write(data.read())
file.close()
|
Use transifex service for translation
|
Use transifex service for translation
|
Python
|
apache-2.0
|
joseant/traccar-1,vipien/traccar,tananaev/traccar,jon-stumpf/traccar,jon-stumpf/traccar,al3x1s/traccar,AnshulJain1985/Roadcast-Tracker,joseant/traccar-1,AnshulJain1985/Roadcast-Tracker,al3x1s/traccar,tsmgeek/traccar,tsmgeek/traccar,ninioe/traccar,jon-stumpf/traccar,5of9/traccar,tananaev/traccar,orcoliver/traccar,tananaev/traccar,orcoliver/traccar,renaudallard/traccar,duke2906/traccar,jssenyange/traccar,jssenyange/traccar,ninioe/traccar,ninioe/traccar,stalien/traccar_test,renaudallard/traccar,duke2906/traccar,vipien/traccar,stalien/traccar_test,5of9/traccar,jssenyange/traccar,orcoliver/traccar,tsmgeek/traccar
|
#!/usr/bin/python
import re
import os
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
path = '../web/l10n/'
files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')]
for f in files:
f = path + f
print 'en -> ' + f[-5:-3]
dict = {}
for line in open(f).read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
dict[match.group(1)] = match.group(2)
out = open(f, 'w')
for line in open(path + 'en.js').read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
if dict.has_key(match.group(1)):
value = dict[match.group(1)]
else:
print '"' + match.group(2) + '"'
value = match.group(2) + ' (*)'
out.write(' ' + match.group(1) + ": '" + value + "'")
if match.group(3) is not None:
out.write(',')
out.write('\n')
else:
out.write(line + '\n')
Use transifex service for translation
|
#!/usr/bin/python
import os
import optparse
import urllib2
import json
import base64
parser = optparse.OptionParser()
parser.add_option("-u", "--user", dest="username", help="transifex user login")
parser.add_option("-p", "--password", dest="password", help="transifex user password")
(options, args) = parser.parse_args()
if not options.username or not options.password:
parser.error('User name and password are required')
os.chdir(os.path.dirname(os.path.abspath(__file__)))
path = "../web/l10n/"
def request(url):
req = urllib2.Request(url)
auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "")
req.add_header("Authorization", "Basic %s" % auth)
return urllib2.urlopen(req)
resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details"))
for language in resource["available_languages"]:
code = language["code"]
data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file")
file = open(path + code + ".json", "wb")
file.write(data.read())
file.close()
|
<commit_before>#!/usr/bin/python
import re
import os
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
path = '../web/l10n/'
files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')]
for f in files:
f = path + f
print 'en -> ' + f[-5:-3]
dict = {}
for line in open(f).read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
dict[match.group(1)] = match.group(2)
out = open(f, 'w')
for line in open(path + 'en.js').read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
if dict.has_key(match.group(1)):
value = dict[match.group(1)]
else:
print '"' + match.group(2) + '"'
value = match.group(2) + ' (*)'
out.write(' ' + match.group(1) + ": '" + value + "'")
if match.group(3) is not None:
out.write(',')
out.write('\n')
else:
out.write(line + '\n')
<commit_msg>Use transifex service for translation<commit_after>
|
#!/usr/bin/python
import os
import optparse
import urllib2
import json
import base64
parser = optparse.OptionParser()
parser.add_option("-u", "--user", dest="username", help="transifex user login")
parser.add_option("-p", "--password", dest="password", help="transifex user password")
(options, args) = parser.parse_args()
if not options.username or not options.password:
parser.error('User name and password are required')
os.chdir(os.path.dirname(os.path.abspath(__file__)))
path = "../web/l10n/"
def request(url):
req = urllib2.Request(url)
auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "")
req.add_header("Authorization", "Basic %s" % auth)
return urllib2.urlopen(req)
resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details"))
for language in resource["available_languages"]:
code = language["code"]
data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file")
file = open(path + code + ".json", "wb")
file.write(data.read())
file.close()
|
#!/usr/bin/python
import re
import os
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
path = '../web/l10n/'
files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')]
for f in files:
f = path + f
print 'en -> ' + f[-5:-3]
dict = {}
for line in open(f).read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
dict[match.group(1)] = match.group(2)
out = open(f, 'w')
for line in open(path + 'en.js').read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
if dict.has_key(match.group(1)):
value = dict[match.group(1)]
else:
print '"' + match.group(2) + '"'
value = match.group(2) + ' (*)'
out.write(' ' + match.group(1) + ": '" + value + "'")
if match.group(3) is not None:
out.write(',')
out.write('\n')
else:
out.write(line + '\n')
Use transifex service for translation#!/usr/bin/python
import os
import optparse
import urllib2
import json
import base64
parser = optparse.OptionParser()
parser.add_option("-u", "--user", dest="username", help="transifex user login")
parser.add_option("-p", "--password", dest="password", help="transifex user password")
(options, args) = parser.parse_args()
if not options.username or not options.password:
parser.error('User name and password are required')
os.chdir(os.path.dirname(os.path.abspath(__file__)))
path = "../web/l10n/"
def request(url):
req = urllib2.Request(url)
auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "")
req.add_header("Authorization", "Basic %s" % auth)
return urllib2.urlopen(req)
resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details"))
for language in resource["available_languages"]:
code = language["code"]
data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file")
file = open(path + code + ".json", "wb")
file.write(data.read())
file.close()
|
<commit_before>#!/usr/bin/python
import re
import os
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
path = '../web/l10n/'
files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')]
for f in files:
f = path + f
print 'en -> ' + f[-5:-3]
dict = {}
for line in open(f).read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
dict[match.group(1)] = match.group(2)
out = open(f, 'w')
for line in open(path + 'en.js').read().splitlines():
match = re.search(" (\\w+): '(.+)'(,)?", line)
if match:
if dict.has_key(match.group(1)):
value = dict[match.group(1)]
else:
print '"' + match.group(2) + '"'
value = match.group(2) + ' (*)'
out.write(' ' + match.group(1) + ": '" + value + "'")
if match.group(3) is not None:
out.write(',')
out.write('\n')
else:
out.write(line + '\n')
<commit_msg>Use transifex service for translation<commit_after>#!/usr/bin/python
import os
import optparse
import urllib2
import json
import base64
parser = optparse.OptionParser()
parser.add_option("-u", "--user", dest="username", help="transifex user login")
parser.add_option("-p", "--password", dest="password", help="transifex user password")
(options, args) = parser.parse_args()
if not options.username or not options.password:
parser.error('User name and password are required')
os.chdir(os.path.dirname(os.path.abspath(__file__)))
path = "../web/l10n/"
def request(url):
req = urllib2.Request(url)
auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "")
req.add_header("Authorization", "Basic %s" % auth)
return urllib2.urlopen(req)
resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details"))
for language in resource["available_languages"]:
code = language["code"]
data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file")
file = open(path + code + ".json", "wb")
file.write(data.read())
file.close()
|
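The rewritten script above authenticates to the Transifex API with HTTP Basic auth and writes one JSON translation file per language. A hypothetical invocation, plus a quick check of the Authorization value it builds (the credentials are placeholders):
#   python translate.py -u alice -p secret
import base64
print(base64.b64encode(b"alice:secret").decode())  # prints: YWxpY2U6c2VjcmV0, sent as "Basic YWxpY2U6c2VjcmV0"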
cc929731dbbf51e00d748aa6cc335d4cd8bb705b
|
soco/__init__.py
|
soco/__init__.py
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.22.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.23-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Set up for v0.23 development
|
Set up for v0.23 development
|
Python
|
mit
|
SoCo/SoCo,SoCo/SoCo
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.22.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
Set up for v0.23 development
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.23-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before>"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.22.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Set up for v0.23 development<commit_after>
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.23-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.22.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
Set up for v0.23 development"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.23-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before>"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.22.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Set up for v0.23 development<commit_after>"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.23-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
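The header comments above say the metadata strings are parsed by setup.py. One common way such a setup.py reads __version__ without importing the package is a small regex scan; this is only a sketch run from the repository root, not SoCo's actual build code:
import re
with open("soco/__init__.py") as f:
    source = f.read()
version = re.search(r'__version__ = "([^"]+)"', source).group(1)
print(version)  # "0.23-dev" for the file shown above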
4c12b100531597b2f6356b3512c9adf462122e3d
|
nova/scheduler/utils.py
|
nova/scheduler/utils.py
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova.openstack.common import jsonutils
def build_request_spec(image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': flavors.extract_flavor(instance),
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova import db
from nova.openstack.common import jsonutils
def build_request_spec(ctxt, image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
instance_type = flavors.extract_flavor(instance)
# NOTE(comstud): This is a bit ugly, but will get cleaned up when
# we're passing an InstanceType internal object.
extra_specs = db.instance_type_extra_specs_get(ctxt,
instance_type['flavorid'])
instance_type['extra_specs'] = extra_specs
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': instance_type,
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
|
Make sure instance_type has extra_specs
|
Make sure instance_type has extra_specs
Make sure that when scheduling, the instance_type used in filters
contains the 'extra_specs'. This is a bit ugly, but will get cleaned up
with objects.
Fixes bug 1192331
Change-Id: I3614f3a858840c9561b4e618fc30f3d3ae5ac689
|
Python
|
apache-2.0
|
n0ano/gantt,n0ano/gantt
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova.openstack.common import jsonutils
def build_request_spec(image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': flavors.extract_flavor(instance),
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
Make sure instance_type has extra_specs
Make sure that when scheduling, the instance_type used in filters
contains the 'extra_specs'. This is a bit ugly, but will get cleaned up
with objects.
Fixes bug 1192331
Change-Id: I3614f3a858840c9561b4e618fc30f3d3ae5ac689
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova import db
from nova.openstack.common import jsonutils
def build_request_spec(ctxt, image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
instance_type = flavors.extract_flavor(instance)
# NOTE(comstud): This is a bit ugly, but will get cleaned up when
# we're passing an InstanceType internal object.
extra_specs = db.instance_type_extra_specs_get(ctxt,
instance_type['flavorid'])
instance_type['extra_specs'] = extra_specs
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': instance_type,
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
|
<commit_before># All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova.openstack.common import jsonutils
def build_request_spec(image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': flavors.extract_flavor(instance),
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
<commit_msg>Make sure instance_type has extra_specs
Make sure that when scheduling, the instance_type used in filters
contains the 'extra_specs'. This is a bit ugly, but will get cleaned up
with objects.
Fixes bug 1192331
Change-Id: I3614f3a858840c9561b4e618fc30f3d3ae5ac689<commit_after>
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova import db
from nova.openstack.common import jsonutils
def build_request_spec(ctxt, image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
instance_type = flavors.extract_flavor(instance)
# NOTE(comstud): This is a bit ugly, but will get cleaned up when
# we're passing an InstanceType internal object.
extra_specs = db.instance_type_extra_specs_get(ctxt,
instance_type['flavorid'])
instance_type['extra_specs'] = extra_specs
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': instance_type,
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova.openstack.common import jsonutils
def build_request_spec(image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': flavors.extract_flavor(instance),
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
Make sure instance_type has extra_specs
Make sure that when scheduling, the instance_type used in filters
contains the 'extra_specs'. This is a bit ugly, but will get cleaned up
with objects.
Fixes bug 1192331
Change-Id: I3614f3a858840c9561b4e618fc30f3d3ae5ac689# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova import db
from nova.openstack.common import jsonutils
def build_request_spec(ctxt, image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
instance_type = flavors.extract_flavor(instance)
# NOTE(comstud): This is a bit ugly, but will get cleaned up when
# we're passing an InstanceType internal object.
extra_specs = db.instance_type_extra_specs_get(ctxt,
instance_type['flavorid'])
instance_type['extra_specs'] = extra_specs
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': instance_type,
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
|
<commit_before># All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova.openstack.common import jsonutils
def build_request_spec(image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': flavors.extract_flavor(instance),
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
<commit_msg>Make sure instance_type has extra_specs
Make sure that when scheduling, the instance_type used in filters
contains the 'extra_specs'. This is a bit ugly, but will get cleaned up
with objects.
Fixes bug 1192331
Change-Id: I3614f3a858840c9561b4e618fc30f3d3ae5ac689<commit_after># All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for scheduling."""
from nova.compute import flavors
from nova import db
from nova.openstack.common import jsonutils
def build_request_spec(ctxt, image, instances):
"""Build a request_spec for the scheduler.
The request_spec assumes that all instances to be scheduled are the same
type.
"""
instance = instances[0]
instance_type = flavors.extract_flavor(instance)
# NOTE(comstud): This is a bit ugly, but will get cleaned up when
# we're passing an InstanceType internal object.
extra_specs = db.instance_type_extra_specs_get(ctxt,
instance_type['flavorid'])
instance_type['extra_specs'] = extra_specs
request_spec = {
'image': image,
'instance_properties': instance,
'instance_type': instance_type,
'instance_uuids': [inst['uuid'] for inst in instances]}
return jsonutils.to_primitive(request_spec)
|
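The updated build_request_spec above attaches the flavor's extra_specs (looked up by flavorid) before serializing the spec handed to the scheduler filters. Illustratively, the resulting dict has the shape below; every value here is made up rather than taken from a real deployment:
instance_type = {'flavorid': '42', 'name': 'm1.tiny', 'memory_mb': 512}
instance_type['extra_specs'] = {'hw:cpu_policy': 'dedicated'}  # fetched from the DB in the real code
request_spec = {
    'image': {'id': 'a-glance-image-uuid'},
    'instance_properties': {'uuid': 'an-instance-uuid'},
    'instance_type': instance_type,
    'instance_uuids': ['an-instance-uuid'],
}
print(request_spec['instance_type']['extra_specs'])  # {'hw:cpu_policy': 'dedicated'}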
47a41af1201085a7ed4f75a1a1ad27d38a3dba70
|
ansible/roles/pico-web/files/start_competition.py
|
ansible/roles/pico-web/files/start_competition.py
|
#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
api.events.add_event("Global", eligibility_conditions={})
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
|
Add a default Global event
|
Add a default Global event
|
Python
|
mit
|
royragsdale/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF
|
#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
Add a default Global event
|
#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
api.events.add_event("Global", eligibility_conditions={})
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
<commit_msg>Add a default Global event<commit_after>
|
#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
api.events.add_event("Global", eligibility_conditions={})
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
Add a default Global event#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
api.events.add_event("Global", eligibility_conditions={})
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
<commit_msg>Add a default Global event<commit_after>#!/usr/bin/env python3
# Simple script to programmatically start a competition useful for development
# and testing purposes. Defaults to 1 year.
# If using a custom APP_SETTINGS_FILE, ensure the appropriate
# environment variable is set prior to running this script. This script is best
# run from the pico-web role (ansible/roles/pico-web/tasks/main.yml)
from datetime import datetime, timedelta
import api
def main():
with api.create_app().app_context():
api.events.add_event("Global", eligibility_conditions={})
settings = api.config.get_settings()
settings["start_time"] = datetime.now()
settings["end_time"] = settings["start_time"] + timedelta(weeks=52)
api.config.change_settings(settings)
if __name__ == "__main__":
main()
|
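As a quick illustration of the change above, here is a minimal sketch of the same start-up script with the hard-coded 52-week duration made configurable. It reuses only the api.create_app, api.events.add_event, api.config.get_settings and api.config.change_settings calls that appear in the commit; the argparse handling and the --weeks flag are illustrative additions, not part of the picoCTF codebase.

#!/usr/bin/env python3
# Sketch only: same api calls as the committed script, with the competition
# length taken from the command line instead of a fixed 52 weeks.
import argparse
from datetime import datetime, timedelta

import api


def main(weeks):
    with api.create_app().app_context():
        api.events.add_event("Global", eligibility_conditions={})
        settings = api.config.get_settings()
        settings["start_time"] = datetime.now()
        settings["end_time"] = settings["start_time"] + timedelta(weeks=weeks)
        api.config.change_settings(settings)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Start a development competition")
    parser.add_argument("--weeks", type=int, default=52,
                        help="competition length in weeks (default: 52)")
    main(parser.parse_args().weeks)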
412727440beb678ba3beef78ee0b934d412afe64
|
examples/permissionsexample/views.py
|
examples/permissionsexample/views.py
|
from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.**
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
|
from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.** or use curl:
`curl -X GET -H 'Accept: application/json' -u test:test http://localhost:8000/permissions-example`
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
|
Add an extra explanation on how to use curl on this view.
|
Add an extra explanation on how to use curl on this view.
|
Python
|
bsd-2-clause
|
cyberj/django-rest-framework,johnraz/django-rest-framework,dmwyatt/django-rest-framework,ajaali/django-rest-framework,d0ugal/django-rest-framework,adambain-vokal/django-rest-framework,linovia/django-rest-framework,ticosax/django-rest-framework,xiaotangyuan/django-rest-framework,alacritythief/django-rest-framework,akalipetis/django-rest-framework,iheitlager/django-rest-framework,raphaelmerx/django-rest-framework,kylefox/django-rest-framework,ticosax/django-rest-framework,tomchristie/django-rest-framework,fishky/django-rest-framework,AlexandreProenca/django-rest-framework,elim/django-rest-framework,jerryhebert/django-rest-framework,kennydude/django-rest-framework,antonyc/django-rest-framework,kennydude/django-rest-framework,nhorelik/django-rest-framework,leeahoward/django-rest-framework,edx/django-rest-framework,kennydude/django-rest-framework,ossanna16/django-rest-framework,rafaelang/django-rest-framework,davesque/django-rest-framework,callorico/django-rest-framework,wangpanjun/django-rest-framework,zeldalink0515/django-rest-framework,paolopaolopaolo/django-rest-framework,kezabelle/django-rest-framework,jness/django-rest-framework,adambain-vokal/django-rest-framework,rafaelcaricio/django-rest-framework,wangpanjun/django-rest-framework,ebsaral/django-rest-framework,MJafarMashhadi/django-rest-framework,tigeraniya/django-rest-framework,vstoykov/django-rest-framework,wangpanjun/django-rest-framework,jpulec/django-rest-framework,leeahoward/django-rest-framework,sbellem/django-rest-framework,abdulhaq-e/django-rest-framework,gregmuellegger/django-rest-framework,rubendura/django-rest-framework,maryokhin/django-rest-framework,qsorix/django-rest-framework,d0ugal/django-rest-framework,ajaali/django-rest-framework,uploadcare/django-rest-framework,wzbozon/django-rest-framework,leeahoward/django-rest-framework,linovia/django-rest-framework,elim/django-rest-framework,jtiai/django-rest-framework,arpheno/django-rest-framework,abdulhaq-e/django-rest-framework,uruz/django-rest-framework,tigeraniya/django-rest-framework,alacritythief/django-rest-framework,jness/django-rest-framework,damycra/django-rest-framework,mgaitan/django-rest-framework,andriy-s/django-rest-framework,delinhabit/django-rest-framework,kgeorgy/django-rest-framework,wwj718/django-rest-framework,sheppard/django-rest-framework,werthen/django-rest-framework,delinhabit/django-rest-framework,VishvajitP/django-rest-framework,VishvajitP/django-rest-framework,tcroiset/django-rest-framework,akalipetis/django-rest-framework,xiaotangyuan/django-rest-framework,thedrow/django-rest-framework-1,canassa/django-rest-framework,mgaitan/django-rest-framework,justanr/django-rest-framework,jerryhebert/django-rest-framework,ashishfinoit/django-rest-framework,hnakamur/django-rest-framework,kylefox/django-rest-framework,rhblind/django-rest-framework,simudream/django-rest-framework,iheitlager/django-rest-framework,krinart/django-rest-framework,sehmaschine/django-rest-framework,simudream/django-rest-framework,dmwyatt/django-rest-framework,callorico/django-rest-framework,ambivalentno/django-rest-framework,justanr/django-rest-framework,wedaly/django-rest-framework,bluedazzle/django-rest-framework,douwevandermeij/django-rest-framework,maryokhin/django-rest-framework,mgaitan/django-rest-framework,brandoncazander/django-rest-framework,ashishfinoit/django-rest-framework,potpath/django-rest-framework,jpulec/django-rest-framework,wwj718/django-rest-framework,kgeorgy/django-rest-framework,rhblind/django-rest-framework,canassa/django-rest-framework,antonyc/django-rest-framework,c
heif/django-rest-framework,dmwyatt/django-rest-framework,ezheidtmann/django-rest-framework,James1345/django-rest-framework,rubendura/django-rest-framework,zeldalink0515/django-rest-framework,rafaelcaricio/django-rest-framework,YBJAY00000/django-rest-framework,delinhabit/django-rest-framework,jpadilla/django-rest-framework,douwevandermeij/django-rest-framework,ambivalentno/django-rest-framework,hunter007/django-rest-framework,alacritythief/django-rest-framework,wedaly/django-rest-framework,davesque/django-rest-framework,ashishfinoit/django-rest-framework,yiyocx/django-rest-framework,xiaotangyuan/django-rest-framework,callorico/django-rest-framework,raphaelmerx/django-rest-framework,thedrow/django-rest-framework-1,fishky/django-rest-framework,douwevandermeij/django-rest-framework,jpadilla/django-rest-framework,johnraz/django-rest-framework,hnakamur/django-rest-framework,sbellem/django-rest-framework,damycra/django-rest-framework,yiyocx/django-rest-framework,d0ugal/django-rest-framework,edx/django-rest-framework,lubomir/django-rest-framework,uruz/django-rest-framework,krinart/django-rest-framework,hnarayanan/django-rest-framework,edx/django-rest-framework,fishky/django-rest-framework,tcroiset/django-rest-framework,hnarayanan/django-rest-framework,gregmuellegger/django-rest-framework,zeldalink0515/django-rest-framework,potpath/django-rest-framework,brandoncazander/django-rest-framework,qsorix/django-rest-framework,jpulec/django-rest-framework,cyberj/django-rest-framework,hnakamur/django-rest-framework,sehmaschine/django-rest-framework,aericson/django-rest-framework,HireAnEsquire/django-rest-framework,lubomir/django-rest-framework,rafaelcaricio/django-rest-framework,davesque/django-rest-framework,AlexandreProenca/django-rest-framework,werthen/django-rest-framework,antonyc/django-rest-framework,cheif/django-rest-framework,cyberj/django-rest-framework,wzbozon/django-rest-framework,HireAnEsquire/django-rest-framework,ebsaral/django-rest-framework,waytai/django-rest-framework,ossanna16/django-rest-framework,sheppard/django-rest-framework,paolopaolopaolo/django-rest-framework,ossanna16/django-rest-framework,akalipetis/django-rest-framework,waytai/django-rest-framework,wedaly/django-rest-framework,tomchristie/django-rest-framework,hunter007/django-rest-framework,buptlsl/django-rest-framework,sbellem/django-rest-framework,pombredanne/django-rest-framework,atombrella/django-rest-framework,sheppard/django-rest-framework,rafaelang/django-rest-framework,qsorix/django-rest-framework,ezheidtmann/django-rest-framework,maryokhin/django-rest-framework,paolopaolopaolo/django-rest-framework,raphaelmerx/django-rest-framework,MJafarMashhadi/django-rest-framework,abdulhaq-e/django-rest-framework,pombredanne/django-rest-framework,MJafarMashhadi/django-rest-framework,cheif/django-rest-framework,thedrow/django-rest-framework-1,uploadcare/django-rest-framework,aericson/django-rest-framework,damycra/django-rest-framework,atombrella/django-rest-framework,jpadilla/django-rest-framework,potpath/django-rest-framework,arpheno/django-rest-framework,gregmuellegger/django-rest-framework,YBJAY00000/django-rest-framework,waytai/django-rest-framework,vstoykov/django-rest-framework,kylefox/django-rest-framework,bluedazzle/django-rest-framework,ajaali/django-rest-framework,ticosax/django-rest-framework,rhblind/django-rest-framework,jerryhebert/django-rest-framework,jness/django-rest-framework,canassa/django-rest-framework,ebsaral/django-rest-framework,rafaelang/django-rest-framework,jtiai/django-rest-framework,atombrella/django-rest-
framework,nhorelik/django-rest-framework,kezabelle/django-rest-framework,buptlsl/django-rest-framework,brandoncazander/django-rest-framework,andriy-s/django-rest-framework,iheitlager/django-rest-framework,YBJAY00000/django-rest-framework,VishvajitP/django-rest-framework,tigeraniya/django-rest-framework,rubendura/django-rest-framework,arpheno/django-rest-framework,nryoung/django-rest-framework,James1345/django-rest-framework,yiyocx/django-rest-framework,kgeorgy/django-rest-framework,buptlsl/django-rest-framework,tcroiset/django-rest-framework,elim/django-rest-framework,sehmaschine/django-rest-framework,kezabelle/django-rest-framework,lubomir/django-rest-framework,vstoykov/django-rest-framework,agconti/django-rest-framework,uploadcare/django-rest-framework,nryoung/django-rest-framework,johnraz/django-rest-framework,hnarayanan/django-rest-framework,andriy-s/django-rest-framework,simudream/django-rest-framework,agconti/django-rest-framework,James1345/django-rest-framework,agconti/django-rest-framework,hunter007/django-rest-framework,wwj718/django-rest-framework,ezheidtmann/django-rest-framework,tomchristie/django-rest-framework,justanr/django-rest-framework,pombredanne/django-rest-framework,AlexandreProenca/django-rest-framework,werthen/django-rest-framework,nhorelik/django-rest-framework,aericson/django-rest-framework,uruz/django-rest-framework,krinart/django-rest-framework,adambain-vokal/django-rest-framework,ambivalentno/django-rest-framework,jtiai/django-rest-framework,HireAnEsquire/django-rest-framework,nryoung/django-rest-framework,wzbozon/django-rest-framework,bluedazzle/django-rest-framework,linovia/django-rest-framework
|
from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.**
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
Add an extra explanation on how to use curl on this view.
|
from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.** or use curl:
`curl -X GET -H 'Accept: application/json' -u test:test http://localhost:8000/permissions-example`
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
|
<commit_before>from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.**
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
<commit_msg>Add an extra explanation on how to use curl on this view.<commit_after>
|
from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.** or use curl:
`curl -X GET -H 'Accept: application/json' -u test:test http://localhost:8000/permissions-example`
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
|
from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.**
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
Add an extra explanation on how to use curl on this view.from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.** or use curl:
`curl -X GET -H 'Accept: application/json' -u test:test http://localhost:8000/permissions-example`
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
|
<commit_before>from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.**
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
<commit_msg>Add an extra explanation on how to use curl on this view.<commit_after>from djangorestframework.views import View
from djangorestframework.permissions import PerUserThrottling, IsAuthenticated
from django.core.urlresolvers import reverse
class PermissionsExampleView(View):
"""
A container view for permissions examples.
"""
def get(self, request):
return [{'name': 'Throttling Example', 'url': reverse('throttled-resource')},
{'name': 'Logged in example', 'url': reverse('loggedin-resource')},]
class ThrottlingExampleView(View):
"""
A basic read-only View that has a **per-user throttle** of 10 requests per minute.
If a user exceeds the 10 requests limit within a period of one minute, the
throttle will be applied until 60 seconds have passed since the first request.
"""
permissions = ( PerUserThrottling, )
throttle = '10/min'
def get(self, request):
"""
Handle GET requests.
"""
return "Successful response to GET request because throttle is not yet active."
class LoggedInExampleView(View):
"""
You can login with **'test', 'test'.** or use curl:
`curl -X GET -H 'Accept: application/json' -u test:test http://localhost:8000/permissions-example`
"""
permissions = (IsAuthenticated, )
def get(self, request):
return 'Logged in or not?'
|
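For readers who prefer Python over curl, a minimal sketch of the same authenticated request is shown below using the third-party requests library. The URL and the test/test credentials come from the docstring in the commit; that the development server is listening on localhost:8000 is an assumption.

# Sketch: equivalent of the curl command from the docstring, using requests.
import requests

response = requests.get(
    'http://localhost:8000/permissions-example',
    auth=('test', 'test'),                    # HTTP basic auth, as in the curl example
    headers={'Accept': 'application/json'},   # request the JSON representation
)
print(response.status_code)
print(response.text)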
1bd2bddca6de75f3139f986cb5bb6a76320f192a
|
axel/cleaner.py
|
axel/cleaner.py
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
Check stopped torrents when cleaning
|
Check stopped torrents when cleaning
|
Python
|
mit
|
craigcabrey/axel
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
Check stopped torrents when cleaning
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
<commit_before>import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
<commit_msg>Check stopped torrents when cleaning<commit_after>
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
Check stopped torrents when cleaningimport datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
<commit_before>import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status == 'seeding':
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
# TODO: Use pb_notify instead
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
<commit_msg>Check stopped torrents when cleaning<commit_after>import datetime
import textwrap
import transmissionrpc
from axel import config
from axel import pb_notify
def clean():
transmission_client = transmissionrpc.Client(
config['transmission']['host'], port=config['transmission']['port']
)
torrents = transmission_client.get_torrents()
now = datetime.datetime.now()
time_threshold = config['transmission']['time_threshold']
for torrent in torrents:
if torrent.status in ('seeding', 'stopped'):
done = torrent.date_done
diff = now - done
if diff.days >= time_threshold:
pb_notify(
textwrap.dedent(
'''
Torrent {torrent} older than {days} days:
removing (with data)
'''.format(torrent=torrent.name, days=time_threshold)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
elif torrent.ratio >= config['transmission']['ratio_threshold']:
pb_notify(
textwrap.dedent(
'''
Torrent {0} reached threshold ratio or higher:
removing (with data)
'''.format(torrent.name)
).strip()
)
transmission_client.remove_torrent(
torrent.id, delete_data=True
)
|
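To make the cleaning rule easier to reason about outside of a running Transmission daemon, here is a small self-contained sketch that approximates the decision clean() makes for each torrent after this commit. The 14-day and 2.0-ratio thresholds are invented example values, not values read from the axel configuration.

# Sketch: a pure-function approximation of the removal decision in clean().
import datetime


def should_remove(status, date_done, now, ratio, time_threshold, ratio_threshold):
    # Only seeding or stopped torrents are candidates (the new status check);
    # they are removed either because they are old enough or because the
    # share ratio threshold has been reached.
    if status not in ('seeding', 'stopped'):
        return False
    if (now - date_done).days >= time_threshold:
        return True
    return ratio >= ratio_threshold


now = datetime.datetime.now()
old = now - datetime.timedelta(days=20)
print(should_remove('stopped', old, now, 0.5, 14, 2.0))        # True: old enough
print(should_remove('seeding', now, now, 2.5, 14, 2.0))        # True: ratio reached
print(should_remove('downloading', old, now, 3.0, 14, 2.0))    # False: still downloading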
3ccd8a0f65a4309d1d07f2d8d921348364586542
|
util.py
|
util.py
|
#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
|
#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
#This is a terrible method, but it works for now
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
state_abbreviations = ['AP', 'AR', 'AS', 'BR', 'CT', 'GA', 'GJ', 'HR', 'HP', 'JK', 'JH', 'KA', 'KL', 'MP', 'MH', 'MN', 'ML', 'MZ', 'NL', 'OR', 'PB', 'RJ', 'SK', 'TN', 'TR', 'UP', 'UT', 'WB']
|
Add list of abbreviations for each state
|
Add list of abbreviations for each state
This is a horrible design. Just horrible.
|
Python
|
bsd-3-clause
|
rkawauchi/IHK,rkawauchi/IHK
|
#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
Add list of abbreviations for each state
This is a horrible design. Just horrible.
|
#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
#This is a terrible method, but it works for now
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
state_abbreviations = ['AP', 'AR', 'AS', 'BR', 'CT', 'GA', 'GJ', 'HR', 'HP', 'JK', 'JH', 'KA', 'KL', 'MP', 'MH', 'MN', 'ML', 'MZ', 'NL', 'OR', 'PB', 'RJ', 'SK', 'TN', 'TR', 'UP', 'UT', 'WB']
|
<commit_before>#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
<commit_msg>Add list of abbreviations for each state
This is a horrible design. Just horrible.<commit_after>
|
#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
#This is a terrible method, but it works for now
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
state_abbreviations = ['AP', 'AR', 'AS', 'BR', 'CT', 'GA', 'GJ', 'HR', 'HP', 'JK', 'JH', 'KA', 'KL', 'MP', 'MH', 'MN', 'ML', 'MZ', 'NL', 'OR', 'PB', 'RJ', 'SK', 'TN', 'TR', 'UP', 'UT', 'WB']
|
#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
Add list of abbreviations for each state
This is a horrible design. Just horrible.#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
#This is a terrible method, but it works for now
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
state_abbreviations = ['AP', 'AR', 'AS', 'BR', 'CT', 'GA', 'GJ', 'HR', 'HP', 'JK', 'JH', 'KA', 'KL', 'MP', 'MH', 'MN', 'ML', 'MZ', 'NL', 'OR', 'PB', 'RJ', 'SK', 'TN', 'TR', 'UP', 'UT', 'WB']
|
<commit_before>#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
<commit_msg>Add list of abbreviations for each state
This is a horrible design. Just horrible.<commit_after>#http://www.gefeg.com/edifact/d03a/s3/codes/cl1h.htm
#This is a terrible method, but it works for now
state_names = ['Andhra Pradesh', 'Arunachal Pradesh', 'Assam', 'Bihar', 'Chhattisgarh', 'Goa', 'Gujarat', 'Haryana', 'Himachal Pradesh', 'Jamma and Kashmir', 'Jharkhand', 'Karnataka', 'Kerala', 'Madhya Pradesh', 'Maharashtra', 'Manipur', 'Meghalaya', 'Mizoram', 'Nagaland', 'Odisha', 'Punjab', 'Rajasthan', 'Sikkim', 'Tamil Nadu', 'Tripura', 'Uttar Pradesh', 'Uttarakhand', 'West Bengal']
state_abbreviations = ['AP', 'AR', 'AS', 'BR', 'CT', 'GA', 'GJ', 'HR', 'HP', 'JK', 'JH', 'KA', 'KL', 'MP', 'MH', 'MN', 'ML', 'MZ', 'NL', 'OR', 'PB', 'RJ', 'SK', 'TN', 'TR', 'UP', 'UT', 'WB']
|
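Since the two lists are kept in the same order, a natural way to use them together is to zip them into a lookup table. The sketch below assumes the state_names and state_abbreviations lists defined in util.py above; the dictionary itself is an illustration and is not part of the commit.

# Sketch: join the parallel lists into a name -> abbreviation mapping.
state_abbreviation_for = dict(zip(state_names, state_abbreviations))

print(state_abbreviation_for['Kerala'])       # KL
print(state_abbreviation_for['West Bengal'])  # WB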
a941218e8bacd528cff058d3afaac06e14ac7766
|
OpenPNM/PHYS/__GenericPhysics__.py
|
OpenPNM/PHYS/__GenericPhysics__.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()
|
Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
|
Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced
|
Python
|
mit
|
TomTranter/OpenPNM,amdouglas/OpenPNM,stadelmanma/OpenPNM,PMEAL/OpenPNM,amdouglas/OpenPNM
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()<commit_msg>Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()<commit_msg>Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run()
|
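The Washburn method above hard-codes a surface tension of 0.072 N/m and a contact angle of 105 degrees. The sketch below evaluates the same expression for example throat diameters without needing an OpenPNM network object; the 1e-6 m diameter is an arbitrary illustrative value.

# Sketch: the Washburn entry-pressure expression from GenericPhysics.Washburn,
# usable on a scalar or a numpy array of throat diameters.
import numpy as np


def washburn_entry_pressure(diameter, sigma=0.072, theta_deg=105):
    return -4 * sigma * np.cos(np.radians(theta_deg)) / diameter


print(washburn_entry_pressure(1e-6))                    # roughly 7.45e4 Pa
print(washburn_entry_pressure(np.array([1e-6, 2e-6])))  # vectorised over throats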
e97e3367585486671a2f30f05ce3e459c9d86f83
|
ooo.py
|
ooo.py
|
#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#(\d+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), int(title_match.group(2))
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and seen[title] and abs(issue - seen[title]) > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %d)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
|
#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#([\d.]+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), title_match.group(2)
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and issue != '0':
if seen[title]:
delta = abs(float(issue) - float(seen[title]))
if delta == 0 or delta > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %s)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
|
Handle floating issue numbers better (.5 and .1 issues)
|
Handle floating issue numbers better (.5 and .1 issues)
|
Python
|
mit
|
xchewtoyx/comicmgt,xchewtoyx/comicmgt
|
#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#(\d+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), int(title_match.group(2))
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and seen[title] and abs(issue - seen[title]) > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %d)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
Handle floating issue numbers better (.5 and .1 issues)
|
#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#([\d.]+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), title_match.group(2)
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and issue != '0':
if seen[title]:
delta = abs(float(issue) - float(seen[title]))
if delta == 0 or delta > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %s)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#(\d+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), int(title_match.group(2))
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and seen[title] and abs(issue - seen[title]) > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %d)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
<commit_msg>Handle floating issue numbers better (.5 and .1 issues)<commit_after>
|
#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#([\d.]+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), title_match.group(2)
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and issue != '0':
if seen[title]:
delta = abs(float(issue) - float(seen[title]))
if delta == 0 or delta > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %s)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
|
#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#(\d+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), int(title_match.group(2))
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and seen[title] and abs(issue - seen[title]) > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %d)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
Handle floating issue numbers better (.5 and .1 issues)#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#([\d.]+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), title_match.group(2)
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and issue != '0':
if seen[title]:
delta = abs(float(issue) - float(seen[title]))
if delta == 0 or delta > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %s)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#(\d+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), int(title_match.group(2))
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and seen[title] and abs(issue - seen[title]) > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %d)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
<commit_msg>Handle floating issue numbers better (.5 and .1 issues)<commit_after>#!/usr/bin/python
import os
import sys
import re
from collections import defaultdict
COMIC_RE = re.compile(r'^\d+ +([^#]+)#([\d.]+)')
def lines(todofile):
with open(todofile) as todolines:
for line in todolines:
title_match = COMIC_RE.match(line)
if title_match:
# (title, issue)
yield line.strip(), title_match.group(1), title_match.group(2)
def issues(todofile):
seen = defaultdict(int)
for line, title, issue in lines(todofile):
if issue and issue != '0':
if seen[title]:
delta = abs(float(issue) - float(seen[title]))
if delta == 0 or delta > 1:
yield line, seen[title]
seen[title] = issue
def main(files):
for todofile in files:
for issue, lastissue in issues(todofile):
print "%s (last seen %s)" % (issue, lastissue)
if __name__ == '__main__':
main(sys.argv[1:])
|
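A minimal sketch of the change recorded above: parsing issue numbers with float() instead of int() so values such as '12.1' survive, and flagging a title only when the gap between consecutive issues is zero or greater than one. The function name and sample values below are illustrative only.

def issue_gap(previous, current):
    # both arguments arrive as strings taken from the todo line, e.g. '12' or '12.1'
    return abs(float(current) - float(previous))

print(issue_gap('12', '12.1'))  # ~0.1 -> treated as a normal step, not reported
print(issue_gap('12', '14'))    # 2.0  -> gap larger than one issue, reported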
db7df35458ac132bb84355df1cf2a5e329ca1d84
|
quickphotos/templatetags/quickphotos_tags.py
|
quickphotos/templatetags/quickphotos_tags.py
|
from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(user, limit=None):
photos = Photo.objects.filter(user=user)
if limit is not None:
photos = photos[:limit]
return photos
|
from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
|
Add support for multiple users photos
|
Add support for multiple users photos
|
Python
|
bsd-3-clause
|
blancltd/django-quick-photos,kmlebedev/mezzanine-instagram-quickphotos
|
from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(user, limit=None):
photos = Photo.objects.filter(user=user)
if limit is not None:
photos = photos[:limit]
return photos
Add support for multiple users photos
|
from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
|
<commit_before>from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(user, limit=None):
photos = Photo.objects.filter(user=user)
if limit is not None:
photos = photos[:limit]
return photos
<commit_msg>Add support for multiple users photos<commit_after>
|
from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
|
from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(user, limit=None):
photos = Photo.objects.filter(user=user)
if limit is not None:
photos = photos[:limit]
return photos
Add support for multiple users photosfrom django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
|
<commit_before>from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(user, limit=None):
photos = Photo.objects.filter(user=user)
if limit is not None:
photos = photos[:limit]
return photos
<commit_msg>Add support for multiple users photos<commit_after>from django import template
from quickphotos.models import Photo
register = template.Library()
@register.assignment_tag
def get_latest_photos(*args, **kwargs):
limit = kwargs.pop('limit', None)
photos = Photo.objects.all()
if args:
photos = photos.filter(user__in=args)
if limit is not None:
photos = photos[:limit]
return photos
|
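A plain-Python sketch of the signature change recorded above: *args collects any number of users while the limit moves into **kwargs, so a single tag call can cover several accounts. The PHOTOS list and field names are invented for the example.

PHOTOS = [{'user': 'alice'}, {'user': 'bob'}, {'user': 'carol'}]

def get_latest(*users, **options):
    limit = options.pop('limit', None)
    # an empty *users means "all users", mirroring Photo.objects.all() in the tag
    selected = [p for p in PHOTOS if not users or p['user'] in users]
    return selected if limit is None else selected[:limit]

print(get_latest('alice', 'bob', limit=1))  # [{'user': 'alice'}]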
1250d66e60b3b429a1f5f39ecd5beda6e4074ff9
|
setup.py
|
setup.py
|
from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
},
)
|
from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
entry_points = {}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
}
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points=entry_points,
)
|
Install console script only in Py2.x.
|
Install console script only in Py2.x.
|
Python
|
bsd-3-clause
|
punchagan/cinspect,punchagan/cinspect
|
from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
},
)
Install console script only in Py2.x.
|
from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
entry_points = {}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
}
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points=entry_points,
)
|
<commit_before>from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
},
)
<commit_msg>Install console script only in Py2.x.<commit_after>
|
from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
entry_points = {}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
}
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points=entry_points,
)
|
from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
},
)
Install console script only in Py2.x.from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
entry_points = {}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
}
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points=entry_points,
)
|
<commit_before>from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
},
)
<commit_msg>Install console script only in Py2.x.<commit_after>from distutils.util import convert_path
import re
from setuptools import setup
import sys
def get_version():
with open(convert_path('cinspect/__init__.py')) as f:
metadata = dict(re.findall("__([a-z]+)__\s*=\s*'([^']+)'", f.read()))
return metadata.get('version', '0.1')
def get_long_description():
with open('README.md') as f:
return f.read()
packages = [
'cinspect',
'cinspect.index',
'cinspect.tests',
]
package_data = {'cinspect.tests': ['data/*.py']}
entry_points = {}
if sys.version_info.major == 2:
packages.extend([
'cinspect.vendor.clang',
])
package_data['cinspect.tests'] += ['data/*.md', 'data/*.c']
entry_points = {
"console_scripts": [
"cinspect-index = cinspect.index.writer:main",
],
}
setup(
name="cinspect",
author="Puneeth Chaganti",
author_email="punchagan@muse-amuse.in",
version=get_version(),
long_description=get_long_description(),
url = "https://github.com/punchagan/cinspect",
license="BSD",
description = "C-source introspection for packages.",
packages = packages,
package_data=package_data,
entry_points=entry_points,
)
|
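A short sketch of the packaging pattern recorded above: build the optional setup() keyword arguments conditionally, then hand them over in one place, so the console script is only declared on Python 2. The script name below is hypothetical.

import sys

extra_kwargs = {}
if sys.version_info.major == 2:
    extra_kwargs['entry_points'] = {
        'console_scripts': ['demo-index = demo.cli:main'],  # hypothetical entry point
    }
print(extra_kwargs)  # {} on Python 3, the console-script mapping on Python 2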
62b14019420aa5fe897884d534b606fbe3c1eaa6
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.64.0.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
|
from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.105.1.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
|
Declare dependency (belongs to commit:b56fc64)
|
Declare dependency (belongs to commit:b56fc64)
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.calendar,ZeitOnline/zeit.calendar,ZeitOnline/zeit.calendar
|
from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.64.0.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
Declare dependency (belongs to commit:b56fc64)
|
from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.105.1.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.64.0.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
<commit_msg>Declare dependency (belongs to commit:b56fc64)<commit_after>
|
from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.105.1.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
|
from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.64.0.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
Declare dependency (belongs to commit:b56fc64)from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.105.1.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.64.0.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
<commit_msg>Declare dependency (belongs to commit:b56fc64)<commit_after>from setuptools import setup, find_packages
setup(
name='zeit.calendar',
version='1.6.12.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="vivi calendar",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'setuptools',
'pyramid_dogpile_cache2',
'z3c.etestbrowser',
'zeit.cms>=2.105.1.dev0',
],
entry_points={
'fanstatic.libraries': [
'zeit_calendar=zeit.calendar.browser.resources:lib',
],
}
)
|
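An illustrative check, not taken from the record, of what the raised 'zeit.cms>=2.105.1.dev0' pin accepts compared with the old minimum; it assumes the third-party packaging library is installed.

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet('>=2.105.1.dev0')
print(Version('2.105.1') in spec)  # True  -- the final release satisfies the new pin
print(Version('2.64.0') in spec)   # False -- the previous minimum no longer does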
d63460fc3b7f6baf79ea05c22712b461711fa01c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.6.dev0'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.7'
author = 'Yannick Dieter, David-Leon Pohl, Jens Janssen'
author_email = 'dieter@physik.uni-bonn.de, pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
|
Increase version 3.1.6 -> 3.1.7
|
Increase version 3.1.6 -> 3.1.7
|
Python
|
mit
|
SiLab-Bonn/pixel_clusterizer
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.6.dev0'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
Increase version 3.1.6 -> 3.1.7
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.7'
author = 'Yannick Dieter, David-Leon Pohl, Jens Janssen'
author_email = 'dieter@physik.uni-bonn.de, pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.6.dev0'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
<commit_msg>Increase version 3.1.6 -> 3.1.7<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.7'
author = 'Yannick Dieter, David-Leon Pohl, Jens Janssen'
author_email = 'dieter@physik.uni-bonn.de, pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.6.dev0'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
Increase version 3.1.6 -> 3.1.7#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.7'
author = 'Yannick Dieter, David-Leon Pohl, Jens Janssen'
author_email = 'dieter@physik.uni-bonn.de, pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.6.dev0'
author = 'David-Leon Pohl, Jens Janssen'
author_email = 'pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
<commit_msg>Increase version 3.1.6 -> 3.1.7<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code
version = '3.1.7'
author = 'Yannick Dieter, David-Leon Pohl, Jens Janssen'
author_email = 'dieter@physik.uni-bonn.de, pohl@physik.uni-bonn.de, janssen@physik.uni-bonn.de'
# requirements for core functionality from requirements.txt
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
setup(
name='pixel_clusterizer',
version=version,
description='A fast, generic, and easy to use clusterizer to cluster hits of a pixel matrix in Python.',
url='https://github.com/SiLab-Bonn/pixel_clusterizer',
license='GNU LESSER GENERAL PUBLIC LICENSE Version 2.1',
long_description='',
author=author,
maintainer=author,
author_email=author_email,
maintainer_email=author_email,
install_requires=install_requires,
packages=find_packages(),
include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control
package_data={'': ['README.*', 'VERSION'], 'docs': ['*'], 'examples': ['*']},
keywords=['cluster', 'clusterizer', 'pixel'],
python_requires='>=2.7',
platforms='any'
)
|
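A small illustration, not part of the record, of why dropping the '.dev0' suffix marks the release: under PEP 440 ordering a dev tag sorts before the final version. It assumes the packaging library is installed.

from packaging.version import Version

print(Version('3.1.6.dev0') < Version('3.1.6') < Version('3.1.7'))  # True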
ab505406a414bf76f1921e6ab8c998ae59339228
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
Add new author Bumping commit and mention additional contributor to Shavar
|
Add new author
Bumping commit and mention additional contributor to Shavar
|
Python
|
mpl-2.0
|
mozilla-services/shavar,mozilla-services/shavar
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
Add new author
Bumping commit and mention additional contributor to Shavar
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
<commit_before>import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
<commit_msg>Add new author
Bumping commit and mention additional contributor to Shavar<commit_after>
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
Add new author
Bumping commit and mention additional contributor to Shavarimport os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
<commit_before>import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
<commit_msg>Add new author
Bumping commit and mention additional contributor to Shavar<commit_after>import os
from setuptools import setup, find_packages
from shavar import __version__
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
with open(os.path.join(here, 'requirements.txt')) as f:
requires = f.read()
setup(name='shavar',
version=__version__,
description='shavar',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Luke Crouch, Se Yeon Kim',
author_email='lcrouch@mozilla.com',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="shavar",
entry_points="""\
[paste.app_factory]
main = shavar:main
""",
)
|
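A hedged sketch of the requirements-file pattern shown in the setup.py above; the file content here is invented. splitlines() plus filtering yields the list form conventionally passed to install_requires, with blank and comment lines removed.

requirements_text = "pyramid\nboto3\n\n# pinned elsewhere\n"

requires = [
    line.strip()
    for line in requirements_text.splitlines()
    if line.strip() and not line.lstrip().startswith('#')
]
print(requires)  # ['pyramid', 'boto3']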
236aca020d200a7e12b8c4659928c79b95c464cd
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
|
#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
tests_require.append('ordereddict')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
|
Add ordereddict package for Python 2.6
|
Add ordereddict package for Python 2.6
|
Python
|
bsd-3-clause
|
nikdoof/python-ts3,ryanbentley/python-ts3
|
#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
Add ordereddict package for Python 2.6
|
#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
tests_require.append('ordereddict')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
|
<commit_before>#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
<commit_msg>Add ordereddict package for Python 2.6<commit_after>
|
#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
tests_require.append('ordereddict')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
|
#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
Add ordereddict package for Python 2.6#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
tests_require.append('ordereddict')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
|
<commit_before>#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
<commit_msg>Add ordereddict package for Python 2.6<commit_after>#!/usr/bin/env python
import sys
from setuptools import setup
from ts3 import __version__
tests_require = ['mock']
if sys.version < '2.7':
tests_require.append('unittest2')
tests_require.append('ordereddict')
setup(
name="python-ts3",
version=__version__,
description="TS3 ServerQuery library for Python",
author="Andrew Willaims",
author_email="andy@tensixtyone.com",
url="https://github.com/nikdoof/python-ts3/",
keywords="teamspeak ts3 voice serverquery teamspeak3",
packages=['ts3'],
scripts=['examples/gents3privkey.py'],
test_suite='ts3.test.suite',
tests_require=tests_require,
classifiers=[
'License :: OSI Approved :: BSD License',
'Topic :: Internet',
'Topic :: Communications',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
]
)
|
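An illustrative alternative to the version gate used above: the record's setup.py compares sys.version as a string, while the tuple comparison below is the more common equivalent for deciding when to add backports such as unittest2 and ordereddict.

import sys

tests_require = ['mock']
if sys.version_info < (2, 7):
    tests_require += ['unittest2', 'ordereddict']
print(tests_require)  # ['mock'] on any modern interpreter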
7c713ac412a2895505ce64865330e55d026e8239
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout>=3.0,<4.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
|
import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout~=3.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
|
Change timeout requirement to use semver comparator
|
Change timeout requirement to use semver comparator
|
Python
|
bsd-3-clause
|
django/asgiref
|
import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout>=3.0,<4.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
Change timeout requirement to use semver comparator
|
import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout~=3.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout>=3.0,<4.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Change timeout requirement to use semver comparator<commit_after>
|
import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout~=3.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
|
import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout>=3.0,<4.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
Change timeout requirement to use semver comparatorimport os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout~=3.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout>=3.0,<4.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Change timeout requirement to use semver comparator<commit_after>import os
from setuptools import find_packages, setup
from asgiref import __version__
# We use the README as the long_description
readme_path = os.path.join(os.path.dirname(__file__), "README.rst")
setup(
name='asgiref',
version=__version__,
url='http://github.com/django/asgiref/',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description='ASGI specs, helper code, and adapters',
long_description=open(readme_path).read(),
license='BSD',
zip_safe=False,
packages=find_packages(exclude=['tests']),
include_package_data=True,
extras_require={
"tests": [
"pytest~=3.3",
"pytest-asyncio~=0.8",
],
},
install_requires=[
'async_timeout~=3.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
],
)
|
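The asgiref record above replaces an explicit version range with the PEP 440 compatible-release operator: `~=3.0` means `>=3.0, ==3.*`, which is the same window as the old `>=3.0,<4.0` spelling. A quick way to check the equivalence is the third-party `packaging` library (an assumption here; the record itself does not use it):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

compatible = SpecifierSet("~=3.0")      # PEP 440 compatible-release clause
explicit = SpecifierSet(">=3.0,<4.0")   # the spelling used before this commit

for candidate in ("3.0", "3.5.2", "3.9", "4.0"):
    v = Version(candidate)
    # both specifiers accept and reject exactly the same versions
    assert (v in compatible) == (v in explicit)

Note that `~=3.0` only pins the major version; `~=3.0.1` would additionally pin the minor version (it expands to `>=3.0.1, ==3.0.*`).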
43b1f8c2f4d2f46817e81a3ba57e64ad2e602197
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
from setuptools import find_packages
setup(
name="threat_intel",
version='0.0.7',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
|
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name="threat_intel",
version='0.0.8',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
|
Reorder imports and bump version
|
Reorder imports and bump version
|
Python
|
mit
|
megancarney/threat_intel,Yelp/threat_intel,SYNchroACK/threat_intel
|
# -*- coding: utf-8 -*-
from setuptools import setup
from setuptools import find_packages
setup(
name="threat_intel",
version='0.0.7',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
Reorder imports and bump version
|
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name="threat_intel",
version='0.0.8',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
from setuptools import find_packages
setup(
name="threat_intel",
version='0.0.7',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
<commit_msg>Reorder imports and bump version<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name="threat_intel",
version='0.0.8',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
from setuptools import find_packages
setup(
name="threat_intel",
version='0.0.7',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
Reorder imports and bump version# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name="threat_intel",
version='0.0.8',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
from setuptools import find_packages
setup(
name="threat_intel",
version='0.0.7',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
<commit_msg>Reorder imports and bump version<commit_after># -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name="threat_intel",
version='0.0.8',
provides=['threat_intel'],
author="Yelp Security",
url='https://github.com/Yelp/threat_intel',
setup_requires='setuptools',
license='Copyright 2015 Yelp',
author_email="opensource@yelp.com",
description="Collection of the API calls for various threat intel feeds.",
packages=find_packages(),
install_requires=[
"grequests==0.2.0",
"simplejson==3.6.5",
],
)
|
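Unlike the asgiref record above, threat_intel hard-codes its version string directly in setup.py, so a release bump only edits the packaging metadata. If the package itself also exposes a `__version__` attribute, a common way to single-source the number without importing the package at build time is to read the literal with a regular expression. A sketch, assuming a hypothetical `threat_intel/__init__.py` that defines `__version__` as a plain string literal:

import re

def read_version(path="threat_intel/__init__.py"):
    # assumes the file contains a literal such as: __version__ = "0.0.8"
    with open(path) as f:
        match = re.search(r'__version__\s*=\s*["\']([^"\']+)["\']', f.read())
    if match is None:
        raise RuntimeError("no __version__ string found in %s" % path)
    return match.group(1)

The returned string can then be passed straight to setup(version=...), keeping the number defined in exactly one place.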
9b0eb8ca3dcefe350d6fa463ca90ce0fed0c1bc7
|
setup.py
|
setup.py
|
#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
|
#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
|
Add missing classifiers: py36, dj versions.
|
Add missing classifiers: py36, dj versions.
|
Python
|
bsd-2-clause
|
beproud/bpcommons,beproud/bpcommons
|
#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
Add missing classifiers: py36, dj versions.
|
#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
<commit_msg>Add missing classifiers: py36, dj versions.<commit_after>
|
#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
|
#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
Add missing classifiers: py36, dj versions.#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
<commit_msg>Add missing classifiers: py36, dj versions.<commit_after>#!/usr/bin/env python
#:coding=utf-8:
from setuptools import setup, find_packages
from beproud.django.commons import VERSION
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='beproud.django.commons',
version=VERSION,
description='Common utilities for Django',
long_description=read('README.rst') + read('ChangeLog.rst'),
author='BeProud Inc.',
author_email='project@beproud.jp',
url='http://www.beproud.jp/',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
include_package_data=True,
packages=find_packages(),
namespace_packages=['beproud', 'beproud.django'],
install_requires=[
'Django>=1.8',
'zenhan>=0.4',
'six',
],
test_suite='tests.main',
zip_safe=False,
)
|
6037d11a8da5ea15c8de468dd730670ba10a44c6
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
Add trove classifier for license
|
Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-data
|
Python
|
mit
|
uiri/toml,uiri/toml
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-data
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
<commit_msg>Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-data<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-datatry:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="License :: OSI Approved :: MIT License",
long_description=readme_string,
classifiers=[
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
<commit_msg>Add trove classifier for license
The trove classifiers are listed on PyPI to help users know -- at a
glance -- what license the project uses. Helps users decide if the
library is appropriate for integration. A full list of available trove
classifiers can be found at:
https://pypi.org/pypi?%3Aaction=list_classifiers
The setuptools "license" argument is not intended to use trove
classifier notation. Simplify it to "MIT". Details can be found:
https://docs.python.org/3/distutils/setupscript.html#additional-meta-data<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import toml
with open("README.rst") as readme_file:
readme_string = readme_file.read()
setup(
name="toml",
version=toml.__version__,
description="Python Library for Tom's Obvious, Minimal Language",
author="Uiri Noyb",
author_email="uiri@xqz.ca",
url="https://github.com/uiri/toml",
packages=['toml'],
license="MIT",
long_description=readme_string,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6']
)
|
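The toml commit message above draws a useful distinction: setup(license=...) is a free-form metadata field, while 'License :: OSI Approved :: MIT License' belongs in the classifiers list that PyPI indexes. Both values end up in the installed package metadata and can be inspected from the standard library; a sketch assuming Python 3.8+ and that the toml package is installed locally:

from importlib.metadata import metadata  # stdlib since Python 3.8

meta = metadata("toml")   # assumes toml is installed in the current environment
print(meta["License"])    # the free-form field from setup(license=...), e.g. "MIT"
print([c for c in meta.get_all("Classifier", []) if c.startswith("License ::")])

After the commit, the free-form field carries the short "MIT" string and the trove classifier carries the machine-readable license, which is the split the quoted distutils documentation recommends.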
2849473bde1dcc01c530e8d4eb5a406e6c6faa6d
|
setup.py
|
setup.py
|
from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
|
from distutils.core import setup
with open('README.rst') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
|
Use README.rst instead of README.md as long description
|
Use README.rst instead of README.md as long description
|
Python
|
mit
|
Artanicus/python-cozify,Artanicus/python-cozify
|
from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
Use README.rst instead of README.md as long description
|
from distutils.core import setup
with open('README.rst') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
|
<commit_before>from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
<commit_msg>Use README.rst instead of README.md as long description<commit_after>
|
from distutils.core import setup
with open('README.rst') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
|
from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
Use README.rst instead of README.md as long descriptionfrom distutils.core import setup
with open('README.rst') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
|
<commit_before>from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
<commit_msg>Use README.rst instead of README.md as long description<commit_after>from distutils.core import setup
with open('README.rst') as file:
long_description = file.read()
setup(name='cozify',
version = '0.2.4',
author = 'artanicus',
author_email = 'python-cozify@nocturnal.fi',
url = 'https://github.com/Artanicus/python-cozify',
description = 'Unofficial Python bindings and helpers for the unpublished Cozify API.',
long_description = long_description,
license = 'MIT',
packages = ['cozify'],
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
]
)
|
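The cozify record switches the long description to README.rst because PyPI renders reStructuredText by default. If a project would rather keep a Markdown README, newer tooling accepts it as long as the content type is declared explicitly; a minimal sketch with a hypothetical package name, assuming setuptools >= 38.6.0 and a recent twine (this is an alternative, not what the commit above did):

from setuptools import setup

with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="example-pkg",   # hypothetical name
    version="0.1.0",
    long_description=long_description,
    long_description_content_type="text/markdown",
)

Without the content-type declaration PyPI falls back to treating the text as reStructuredText, which is why switching the file itself, as the commit does, is the simpler fix for older toolchains.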
b8fecc2956f4a979906191a8fa20de3839b1e8cb
|
setup.py
|
setup.py
|
from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.23',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
|
from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.24',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
|
Update meta. Bump minor version.
|
Update meta. Bump minor version.
|
Python
|
mit
|
mehdisadeghi/clashogram
|
from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.23',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
Update meta. Bump minor version.
|
from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.24',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
|
<commit_before>from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.23',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
<commit_msg>Update meta. Bump minor version.<commit_after>
|
from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.24',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
|
from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.23',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
Update meta. Bump minor version.from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.24',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
|
<commit_before>from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.23',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
<commit_msg>Update meta. Bump minor version.<commit_after>from setuptools import setup
def readme():
with open('README.rst', encoding='utf-8') as f:
return f.read()
setup(name='Clashogram',
version='0.1.24',
description='Clash of Clans war moniting for telegram channels.',
long_description=readme(),
author='Mehdi Sadeghi',
author_email='mehdi@mehdix.org',
url='https://github.com/mehdisadeghi/clashogram',
py_modules=['clashogram'],
scripts=['clashogram.py'],
entry_points={
'console_scripts': ['clashogram=clashogram:main']
},
license='MIT',
platforms='any',
install_requires=['babel',
'requests',
'jdatetime',
'pytz',
'python-dateutil',
'click'],
keywords=['games', 'telegram', 'coc', 'notification', 'clash of clans'],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: Persian',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'])
|
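The Clashogram record above trims the classifier list down to the Python versions that are actually supported, but classifiers are descriptive metadata only and do not stop pip from installing the package on an older interpreter. The enforcing counterpart is the python_requires argument; a hedged sketch with a hypothetical package (the commit itself did not add this):

from setuptools import setup

setup(
    name="example-pkg",          # hypothetical package, not Clashogram itself
    version="0.1.0",
    python_requires=">=3.5",     # pip refuses to install on older interpreters
    classifiers=[
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
    ],
)

python_requires needs setuptools >= 24.2.0 at build time and pip >= 9.0 at install time to be honored.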
3540f827e12960b5ce48608249514051bb02cf61
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
'mithril.migrations',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
|
Make sure to include migrations! :hurtrealbad:
|
Make sure to include migrations! :hurtrealbad:
|
Python
|
bsd-3-clause
|
urbanairship/django-mithril,urbanairship/django-mithril
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
Make sure to include migrations! :hurtrealbad:
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
'mithril.migrations',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
<commit_msg>Make sure to include migrations! :hurtrealbad:<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
'mithril.migrations',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
Make sure to include migrations! :hurtrealbad:#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
'mithril.migrations',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
<commit_msg>Make sure to include migrations! :hurtrealbad:<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors
import os
import sys
import mithril
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
required = [
'netaddr==0.7.6',
]
packages = [
'mithril',
'mithril.tests',
'mithril.migrations',
]
setup(
name='django-mithril',
version='%d.%d.%d' % mithril.__version__,
description='IP Whitelisting for Django',
long_description=open('README.md').read(),
author='Chris Dickinson',
author_email='chrisdickinson@urbanairship.com',
url='http://urbanairship.github.com/django-mithril/',
packages=packages,
package_data={'': ['LICENSE']},
include_package_data=True,
install_requires=required,
license=open("LICENSE").read(),
zip_safe=False,
classifiers=(
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
),
)
|
2c2c51d5fa0594aa2d160d28c15895ece358cafe
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")])]
)
|
#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")],
cython_c_in_temp = 1)]
)
|
Use the Cython Extension class so we can place generated C files in the build dir.
|
Use the Cython Extension class so we can place generated C files in the build dir.
git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f
|
Python
|
lgpl-2.1
|
Kamekameha/vapoursynth,Kamekameha/vapoursynth,Kamekameha/vapoursynth,vapoursynth/vapoursynth,vapoursynth/vapoursynth,Kamekameha/vapoursynth,vapoursynth/vapoursynth,vapoursynth/vapoursynth
|
#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")])]
)
Use the Cython Extension class so we can place generated C files in the build dir.
git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f
|
#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")],
cython_c_in_temp = 1)]
)
|
<commit_before>#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")])]
)
<commit_msg>Use the Cython Extension class so we can place generated C files in the build dir.
git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f<commit_after>
|
#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")],
cython_c_in_temp = 1)]
)
|
#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")])]
)
Use the Cython Extension class so we can place generated C files in the build dir.
git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")],
cython_c_in_temp = 1)]
)
|
<commit_before>#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")])]
)
<commit_msg>Use the Cython Extension class so we can place generated C files in the build dir.
git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f<commit_after>#!/usr/bin/env python3
from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext
setup(
name = "VapourSynth",
description = "A frameserver for the 21st century",
url = "http://www.vapoursynth.com/",
download_url = "http://code.google.com/p/vapoursynth/",
author = "Fredrik Mellbin",
author_email = "fredrik.mellbin@gmail.com",
license = "LGPL 2.1 or later",
version = "1.0.0",
long_description = "A portable replacement for Avisynth",
platforms = "All",
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
libraries = ["vapoursynth"],
library_dirs = [curdir, "build"],
include_dirs = [curdir, join("src", "cython")],
cython_c_in_temp = 1)]
)
|
7640675024ecea550c253eba29ba59c2645fc509
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=[
'pandas',
'numpy'
],
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
|
from setuptools import setup
import sys
requirements = [
'pandas',
'numpy',
]
if sys.version_info < (3, 4):
requirements.append('enum34')
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=requirements,
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
|
Add enum34 dependency for older packages
|
Add enum34 dependency for older packages
|
Python
|
mit
|
johanvdw/niche_vlaanderen
|
from setuptools import setup
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=[
'pandas',
'numpy'
],
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
Add enum34 dependency for older packages
|
from setuptools import setup
requirements = [
'pandas',
'numpy',
]
if sys.version_info < (3, 4):
requirements.append('enum34')
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=requirements,
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
|
<commit_before>from setuptools import setup
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=[
'pandas',
'numpy'
],
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
<commit_msg>Add enum34 dependency for older packages<commit_after>
|
from setuptools import setup
import sys
requirements = [
'pandas',
'numpy',
]
if sys.version_info < (3, 4):
requirements.append('enum34')
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=requirements,
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
|
from setuptools import setup
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=[
'pandas',
'numpy'
],
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
Add enum34 dependency for older packagesfrom setuptools import setup
import sys
requirements = [
'pandas',
'numpy',
]
if sys.version_info < (3, 4):
requirements.append('enum34')
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=requirements,
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
|
<commit_before>from setuptools import setup
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=[
'pandas',
'numpy'
],
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
<commit_msg>Add enum34 dependency for older packages<commit_after>from setuptools import setup
import sys
requirements = [
'pandas',
'numpy',
]
if sys.version_info < (3, 4):
requirements.append('enum34')
setup(name='niche_vlaanderen',
version="0.0.1",
description='NICHE Vlaanderen',
url='https://github.com/INBO/niche_vlaanderen',
author='Johan Van de Wauw',
author_email='johan.vandewauw@inbo.be',
license='MIT',
install_requires=requirements,
classifiers=[
'Development Status :: 1 - Planning',
'Inteded Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
tests_require=['pytest'],
)
|
a5f5231e8e55b7052e2525876b60f939598edc91
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
|
#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
Mark as requiring at least Python 3.6
|
Mark as requiring at least Python 3.6
|
Python
|
mit
|
jingw/pyhdfs,jingw/pyhdfs
|
#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
Mark as requiring at least Python 3.6
|
#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
<commit_before>#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
<commit_msg>Mark as requiring at least Python 3.6<commit_after>
|
#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
Mark as requiring at least Python 3.6#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
<commit_before>#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
<commit_msg>Mark as requiring at least Python 3.6<commit_after>#!/usr/bin/env python3
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="99jingw@gmail.com",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
f9af94cca14665703f56f867083b6a4ff72fa42d
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
|
import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'pytest-remove-stale-bytecode',
'WebTest >= 2.0.14'],
),
)
|
Remove stale bytecode when running tests.
|
Remove stale bytecode when running tests.
|
Python
|
bsd-3-clause
|
taschini/morepath,morepath/morepath,faassen/morepath
|
import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
Remove stale bytecode when running tests.
|
import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'pytest-remove-stale-bytecode',
'WebTest >= 2.0.14'],
),
)
|
<commit_before>import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
<commit_msg>Remove stale bytecode when running tests.<commit_after>
|
import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'pytest-remove-stale-bytecode',
'WebTest >= 2.0.14'],
),
)
|
import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
Remove stale bytecode when running tests.import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'pytest-remove-stale-bytecode',
'WebTest >= 2.0.14'],
),
)
|
<commit_before>import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
<commit_msg>Remove stale bytecode when running tests.<commit_after>import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='morepath',
version='0.10.dev0',
description="A micro web-framework with superpowers",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
url='http://morepath.readthedocs.org',
license="BSD",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta'
],
keywords="web wsgi routing morepath",
install_requires=[
'setuptools',
'webob >= 1.3.1',
'venusifork >= 2.0a2',
'reg >= 0.9.2'
],
extras_require = dict(
test=['pytest >= 2.5.2',
'py >= 1.4.20',
'pytest-cov',
'pytest-remove-stale-bytecode',
'WebTest >= 2.0.14'],
),
)
|
43f5221c3ca8f6e22a292c92f0ba02d36c5b03a1
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
python_requires=">=3.5",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
|
Add Python 3 Only classifier and python_requires >= 3.5
|
Add Python 3 Only classifier and python_requires >= 3.5
in setup.py
|
Python
|
mit
|
renanivo/pytest-testdox
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
Add Python 3 Only classifier and python_requires >= 3.5
in setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
python_requires=">=3.5",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
<commit_msg>Add Python 3 Only classifier and python_requires >= 3.5
in setup.py<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
python_requires=">=3.5",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
Add Python 3 Only classifier and python_requires >= 3.5
in setup.py#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
python_requires=">=3.5",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
<commit_msg>Add Python 3 Only classifier and python_requires >= 3.5
in setup.py<commit_after>#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path) as f:
return f.read()
setup(
name='pytest-testdox',
version='1.2.1',
description='A testdox format reporter for pytest',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/pytest-testdox',
keywords='pytest testdox test report bdd',
install_requires=[
'pytest>=3.7.0',
],
packages=['pytest_testdox'],
python_requires=">=3.5",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
entry_points={
'pytest11': [
'testdox = pytest_testdox.plugin',
],
},
)
|
2695171199a1992fae699ff6f54ef97ab104fb57
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
|
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
entry_points={'console_scripts': ['pyinstrument = pyinstrument.__main__:main']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
|
Add pyinstrument as a commandline entry point
|
Add pyinstrument as a commandline entry point
|
Python
|
bsd-3-clause
|
edx/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,edx/pyinstrument
|
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
Add pyinstrument as a commandline entry point
|
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
entry_points={'console_scripts': ['pyinstrument = pyinstrument.__main__:main']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
<commit_msg>Add pyinstrument as a commandline entry point<commit_after>
|
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
entry_points={'console_scripts': ['pyinstrument = pyinstrument.__main__:main']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
|
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
Add pyinstrument as a commandline entry point
from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
entry_points={'console_scripts': ['pyinstrument = pyinstrument.__main__:main']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
<commit_msg>Add pyinstrument as a commandline entry point<commit_after>from setuptools import setup, find_packages
setup(
name="pyinstrument",
packages=['pyinstrument'],
version="0.12",
description="A call stack profiler for Python. Inspired by Apple's Instruments.app",
author='Joe Rickerby',
author_email='joerick@mac.com',
url='https://github.com/joerick/pyinstrument',
keywords=['profiling', 'profile', 'profiler', 'cpu', 'time'],
include_package_data=True,
entry_points={'console_scripts': ['pyinstrument = pyinstrument.__main__:main']},
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Debuggers',
'Topic :: Software Development :: Testing',
]
)
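The entry_points line added in the record above makes setuptools generate a small 'pyinstrument' wrapper script that imports pyinstrument.__main__ and calls its main() function, using the return value as the process exit code. The sketch below shows the minimal shape such a module needs to satisfy a 'name = package.__main__:main' console script; the argument handling is invented for illustration and is not pyinstrument's actual implementation.
# __main__.py -- minimal shape of a module targeted by a console_scripts entry.
import sys

def main(argv=None):
    """Called by the generated 'pyinstrument' wrapper; the return value becomes the exit code."""
    argv = sys.argv[1:] if argv is None else argv
    if not argv:
        sys.stderr.write("usage: pyinstrument <script.py> [args...]\n")
        return 2
    # ... set up the profiler and run the target script here ...
    return 0

if __name__ == "__main__":
    sys.exit(main())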
|
5b97d56f8c8f751896b00c6cb1b3f360ea06ecf2
|
setup.py
|
setup.py
|
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operation System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Logging',
]
)
|
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
]
)
|
Use trove classifiers from official list
|
Use trove classifiers from official list
(aligned with https://pypi.python.org/pypi?%3Aaction=list_classifiers)
|
Python
|
bsd-2-clause
|
bbc/python-json-logger,madzak/python-json-logger
|
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operation System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Logging',
]
)
Use trove classifiers from official list
(aligned with https://pypi.python.org/pypi?%3Aaction=list_classifiers)
|
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
]
)
|
<commit_before>import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operation System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Logging',
]
)
<commit_msg>Use trove classifiers from official list
(aligned with https://pypi.python.org/pypi?%3Aaction=list_classifiers)<commit_after>
|
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
]
)
|
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operation System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Logging',
]
)
Use trove classifiers from official list
(aligned with https://pypi.python.org/pypi?%3Aaction=list_classifiers)
import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
]
)
|
<commit_before>import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 1 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operation System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Logging',
]
)
<commit_msg>Use trove classifiers from official list
(aligned with https://pypi.python.org/pypi?%3Aaction=list_classifiers)<commit_after>import sys
if sys.version_info < (2, 7):
print sys.stderr, "{}: need Python 2.7 or later.".format(sys.argv[0])
print sys.stderror, "Your python is {}".format(sys.version)
sys.exit(1)
from setuptools import setup
setup(
name = "python-json-logger",
version = "0.0.1",
url = "http://github.com/madzak/python-json-logger",
license = "BSD",
description = "A python library adding a json log formatter",
author = "Zakaria Zajac",
author_email = "zak@madzak.com",
package_dir = {'': 'src'},
packages = [''],
test_suite = "tests.tests",
install_requires = ['setuptools'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
]
)
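The fix above swaps hand-typed strings such as 'Operation System :: OS Independent' and 'Topic :: Logging' for entries taken from the official Trove list, since unknown classifiers are useless at best and rejected by the index at worst. A simple guard against reintroducing such typos is to diff the declared classifiers against the published list before uploading; the sketch below assumes the PyPA-maintained trove-classifiers helper package, which exposes the current list as a set and is not a dependency of python-json-logger itself.
# check_classifiers.py -- flag declared classifiers missing from the official list.
from trove_classifiers import classifiers as OFFICIAL  # third-party helper, assumed available

DECLARED = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
    'Topic :: System :: Logging',
]

unknown = [c for c in DECLARED if c not in OFFICIAL]
if unknown:
    raise SystemExit("unknown classifiers: " + ", ".join(unknown))
print("all declared classifiers are in the official list")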
|
37cae8e6f793f8a13a4c13d5333e8a0c9290f42a
|
setup.py
|
setup.py
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
|
Add missing tests dependency pycodestyle.
|
Add missing tests dependency pycodestyle.
|
Python
|
mit
|
Kentzo/git-archive-all
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
Add missing tests dependency pycodestyle.
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
|
<commit_before>import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
<commit_msg>Add missing tests dependency pycodestyle.<commit_after>
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
|
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
Add missing tests dependency pycodestyle.
import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
|
<commit_before>import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov'],
cmdclass={"test": PyTest},
)
<commit_msg>Add missing tests dependency pycodestyle.<commit_after>import re
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
# Parse the version from the file.
verstrline = open('git_archive_all.py', "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
verstr = mo.group(1)
else:
raise RuntimeError("Unable to find version string in git_archive_all.py")
class PyTest(TestCommand):
user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
version=verstr,
py_modules=['git_archive_all'],
entry_points={'console_scripts': 'git-archive-all=git_archive_all:main'},
tests_require=['pytest', 'pytest-cov', 'pycodestyle'],
cmdclass={"test": PyTest},
)
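In the record above the custom test command exposes a single --pytest-args option (short form -a), splits it with shlex and passes the result to pytest.main(), so extra test dependencies such as pycodestyle only need to be importable from the suite. The snippet below demonstrates the shlex step in isolation; the argument string is made up for illustration.
import shlex

# The test command hands the raw --pytest-args string to shlex.split() before
# calling pytest.main(), so quoting follows shell rules.
raw = "-x --cov=git_archive_all -k 'zip and not empty'"
print(shlex.split(raw))
# prints: ['-x', '--cov=git_archive_all', '-k', 'zip and not empty']
With that plumbing, an invocation along the lines of python setup.py test --pytest-args="-x --cov=git_archive_all" should reach pytest with exactly that argument list; the coverage flag presumes pytest-cov, which the record already lists in tests_require.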
|
08ac25ba28162ba68256dc6b6a47afa9af080c3c
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers = [
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
**extra_setup
)
|
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
keywords=['terminal', 'tty', 'curses', 'formatting'],
**extra_setup
)
|
Add some keywords to improve PyPI search ranking.
|
Add some keywords to improve PyPI search ranking.
|
Python
|
mit
|
tartley/blessings,jquast/blessed,erikrose/blessings
|
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers = [
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
**extra_setup
)
Add some keywords to improve PyPI search ranking.
|
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
keywords=['terminal', 'tty', 'curses', 'formatting'],
**extra_setup
)
|
<commit_before>import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers = [
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
**extra_setup
)
<commit_msg>Add some keywords to improve PyPI search ranking.<commit_after>
|
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
keywords=['terminal', 'tty', 'curses', 'formatting'],
**extra_setup
)
|
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers = [
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
**extra_setup
)
Add some keywords to improve PyPI search ranking.
import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
keywords=['terminal', 'tty', 'curses', 'formatting'],
**extra_setup
)
|
<commit_before>import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers = [
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
**extra_setup
)
<commit_msg>Add some keywords to improve PyPI search ranking.<commit_after>import sys
from setuptools import setup, find_packages
extra_setup = {}
if sys.version_info >= (3,):
extra_setup['use_2to3'] = True
setup(
name='blessings',
version='1.0',
description='A thin, practical wrapper around terminal formatting, positioning, and more',
long_description=open('README.rst').read(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
keywords=['terminal', 'tty', 'curses', 'formatting'],
**extra_setup
)
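The extra_setup dict in the record above is merged into setup() with **extra_setup, so use_2to3 is only requested when installing under Python 3. Note that 2to3 support was removed in setuptools 58, so the pattern only works with older toolchains; the fragment below merely restates the conditional merge in isolation, as a reminder that any version-dependent option can be injected the same way.
import sys

extra_setup = {}
if sys.version_info >= (3,):
    # Only meaningful on setuptools older than 58; newer releases reject use_2to3.
    extra_setup['use_2to3'] = True

# setup(..., **extra_setup) then receives the extra keyword only when the
# condition above was true; printing it here just shows the merged options.
print(extra_setup)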
|
10be723bf9396c3e513d09ce2a16a3aee0eebe36
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
Make sure the package is built before it is tested
|
Make sure the package is built before it is tested
|
Python
|
bsd-3-clause
|
barentsen/reproject,mwcraig/reproject,astrofrog/reproject,astrofrog/reproject,bsipocz/reproject,barentsen/reproject,barentsen/reproject,astrofrog/reproject,bsipocz/reproject,mwcraig/reproject
|
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
Make sure the package is built before it is tested
|
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
<commit_before>#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
<commit_msg>Make sure the package is built before it is tested<commit_after>
|
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
Make sure the package is built before it is tested
#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
<commit_before>#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
<commit_msg>Make sure the package is built before it is tested<commit_after>#!/usr/bin/env python
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='thomas.robitaille@gmail.com',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
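One practical side effect of the approach above is that tempfile.mkdtemp() leaves the copied build tree behind after every test run, because nothing removes it. The sketch below reuses the same copy-then-test idea with explicit cleanup; the build/lib path and the runtests.py name mirror the record but are assumptions for illustration, not reproject's actual code.
import os
import shutil
import subprocess
import sys
import tempfile

def test_built_tree(build_lib, runner="runtests.py"):
    """Copy the built package to a temp dir, run the runner there, then clean up."""
    tmp_dir = tempfile.mkdtemp(prefix="reprojection-test-")
    try:
        testing_path = os.path.join(tmp_dir, os.path.basename(build_lib))
        shutil.copytree(build_lib, testing_path)
        return subprocess.call([sys.executable, os.path.abspath(runner)],
                               cwd=testing_path)
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)

if __name__ == "__main__":
    raise SystemExit(test_built_tree(os.path.abspath("build/lib")))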
|
049ddc3422579f4e3f7047d61484d67a6d9dd826
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pyepub',
version='0.2.3',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
|
from distutils.core import setup
setup(
name='pyepub',
version='0.2.4',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
|
Correct a list comprehension which caused node comments to be added to spine and manifest json
|
Correct a list comprehension which caused node comments to be added to spine and manifest json
|
Python
|
mit
|
gabalese/pyepub
|
from distutils.core import setup
setup(
name='pyepub',
version='0.2.3',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
Correct a list comprehension which caused node comments to be added to spine and manifest json
|
from distutils.core import setup
setup(
name='pyepub',
version='0.2.4',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
|
<commit_before>from distutils.core import setup
setup(
name='pyepub',
version='0.2.3',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
<commit_msg>Correct a list comprehension which caused node comments to be added to spine and manifest json<commit_after>
|
from distutils.core import setup
setup(
name='pyepub',
version='0.2.4',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
|
from distutils.core import setup
setup(
name='pyepub',
version='0.2.3',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
Correct a list comprehension which caused node comments to be added to spine and manifest json
from distutils.core import setup
setup(
name='pyepub',
version='0.2.4',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
|
<commit_before>from distutils.core import setup
setup(
name='pyepub',
version='0.2.3',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
<commit_msg>Correct a list comprehension which caused node comments to be added to spine and manifest json<commit_after>from distutils.core import setup
setup(
name='pyepub',
version='0.2.4',
packages=['pyepub'],
url='http://blog.alese.it/pyepub',
license='MIT',
author='Gabriele Alese',
author_email='gabriele@alese.it',
description='Enhanced EPUB library'
)
|
162b4c689e14042d043c6de03311fb6049ed94c1
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=find_packages(),
zip_safe=False
)
|
from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=['pyimagediet'],
zip_safe=False
)
|
Remove tests from built package
|
Remove tests from built package
|
Python
|
mit
|
samastur/pyimagediet
|
from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=find_packages(),
zip_safe=False
)
Remove tests from built package
|
from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=['pyimagediet'],
zip_safe=False
)
|
<commit_before>from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=find_packages(),
zip_safe=False
)
<commit_msg>Remove tests from built package<commit_after>
|
from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=['pyimagediet'],
zip_safe=False
)
|
from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=find_packages(),
zip_safe=False
)
Remove tests from built packagefrom setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=['pyimagediet'],
zip_safe=False
)
|
<commit_before>from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=find_packages(),
zip_safe=False
)
<commit_msg>Remove tests from built package<commit_after>from setuptools import setup, find_packages
long_description = '''\
pyimagediet is a Python wrapper around image optimisations tools used to
reduce images size without loss of visual quality. It provides a uniform
interface to tools, easy configuration and integration.
It works on images in JPEG, GIF and PNG formats and will leave others
unchanged.'''
setup(
author="Marko Samastur",
author_email="markos@gaivo.net",
name='pyimagediet',
version='0.5',
description='Python wrapper around image optimisations tools',
long_description=long_description,
url='https://github.com/samastur/pyimagediet/',
platforms=['OS Independent'],
license='MIT License',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
install_requires=[
'PyYAML>=3.11',
'python-magic>=0.4.10',
],
include_package_data=True,
packages=['pyimagediet'],
zip_safe=False
)
|
9fa2cfee9d182eefe918c0303c7966667d9673c9
|
tasks.py
|
tasks.py
|
from os.path import join
from invoke import Collection
from invocations import docs as _docs, testing
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
ns = Collection(testing.test, docs=docs, www=www)
|
from os.path import join
from invoke import Collection, task
from invocations import docs as _docs
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Until we move to spec-based testing
@task
def test(ctx):
ctx.run("python test.py --verbose")
ns = Collection(test, docs=docs, www=www)
|
Replace incorrect import of generic test runner w/ custom task
|
Replace incorrect import of generic test runner w/ custom task
|
Python
|
lgpl-2.1
|
zpzgone/paramiko,CptLemming/paramiko,digitalquacks/paramiko,reaperhulk/paramiko,Automatic/paramiko,selboo/paramiko,jorik041/paramiko,zarr12steven/paramiko,davidbistolas/paramiko,toby82/paramiko,torkil/paramiko,rcorrieri/paramiko,dorianpula/paramiko,thusoy/paramiko,ameily/paramiko,redixin/paramiko,SebastianDeiss/paramiko,paramiko/paramiko,mirrorcoder/paramiko,jaraco/paramiko,remram44/paramiko,varunarya10/paramiko,esc/paramiko,fvicente/paramiko,mhdaimi/paramiko,thisch/paramiko,dlitz/paramiko,anadigi/paramiko
|
from os.path import join
from invoke import Collection
from invocations import docs as _docs, testing
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
ns = Collection(testing.test, docs=docs, www=www)
Replace incorrect import of generic test runner w/ custom task
|
from os.path import join
from invoke import Collection, task
from invocations import docs as _docs
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Until we move to spec-based testing
@task
def test(ctx):
ctx.run("python test.py --verbose")
ns = Collection(test, docs=docs, www=www)
|
<commit_before>from os.path import join
from invoke import Collection
from invocations import docs as _docs, testing
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
ns = Collection(testing.test, docs=docs, www=www)
<commit_msg>Replace incorrect import of generic test runner w/ custom task<commit_after>
|
from os.path import join
from invoke import Collection, task
from invocations import docs as _docs
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Until we move to spec-based testing
@task
def test(ctx):
ctx.run("python test.py --verbose")
ns = Collection(test, docs=docs, www=www)
|
from os.path import join
from invoke import Collection
from invocations import docs as _docs, testing
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
ns = Collection(testing.test, docs=docs, www=www)
Replace incorrect import of generic test runner w/ custom taskfrom os.path import join
from invoke import Collection, task
from invocations import docs as _docs
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Until we move to spec-based testing
@task
def test(ctx):
ctx.run("python test.py --verbose")
ns = Collection(test, docs=docs, www=www)
|
<commit_before>from os.path import join
from invoke import Collection
from invocations import docs as _docs, testing
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
ns = Collection(testing.test, docs=docs, www=www)
<commit_msg>Replace incorrect import of generic test runner w/ custom task<commit_after>from os.path import join
from invoke import Collection, task
from invocations import docs as _docs
d = 'sites'
# Usage doc/API site (published as docs.paramiko.org)
path = join(d, 'docs')
docs = Collection.from_module(_docs, name='docs', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Main/about/changelog site ((www.)?paramiko.org)
path = join(d, 'www')
www = Collection.from_module(_docs, name='www', config={
'sphinx.source': path,
'sphinx.target': join(path, '_build'),
})
# Until we move to spec-based testing
@task
def test(ctx):
ctx.run("python test.py --verbose")
ns = Collection(test, docs=docs, www=www)
|
e39f6f310bf9d65e21aa3a923a836c836b6bcd2e
|
tests.py
|
tests.py
|
from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
|
from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
class BasicPreconditionTests (TestCase):
def test_basic_precondition(self):
@preconditions(lambda i: isinstance(i, int) and i > 0)
def uint_pred(i):
return i-1
# Not greater than 0:
self.assertRaises(PreconditionError, uint_pred, 0)
# Not an int:
self.assertRaises(PreconditionError, uint_pred, 1.0)
# Test a successful call:
self.assertEqual(0, uint_pred(1))
def test_relational_precondition(self):
@preconditions(lambda a, b: a < b)
def inc_range(a, b):
return range(a, b)
self.assertRaises(PreconditionError, inc_range, 3, 3)
self.assertRaises(PreconditionError, inc_range, 5, 3)
self.assertEqual([3, 4], inc_range(3, 5))
|
Add a basic precondition, and a relational precondition.
|
Add a basic precondition, and a relational precondition.
|
Python
|
mit
|
nejucomo/preconditions
|
from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
Add a basic precondition, and a relational precondition.
|
from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
class BasicPreconditionTests (TestCase):
def test_basic_precondition(self):
@preconditions(lambda i: isinstance(i, int) and i > 0)
def uint_pred(i):
return i-1
# Not greater than 0:
self.assertRaises(PreconditionError, uint_pred, 0)
# Not an int:
self.assertRaises(PreconditionError, uint_pred, 1.0)
# Test a successful call:
self.assertEqual(0, uint_pred(1))
def test_relational_precondition(self):
@preconditions(lambda a, b: a < b)
def inc_range(a, b):
return range(a, b)
self.assertRaises(PreconditionError, inc_range, 3, 3)
self.assertRaises(PreconditionError, inc_range, 5, 3)
self.assertEqual([3, 4], inc_range(3, 5))
|
<commit_before>from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
<commit_msg>Add a basic precondition, and a relational precondition.<commit_after>
|
from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
class BasicPreconditionTests (TestCase):
def test_basic_precondition(self):
@preconditions(lambda i: isinstance(i, int) and i > 0)
def uint_pred(i):
return i-1
# Not greater than 0:
self.assertRaises(PreconditionError, uint_pred, 0)
# Not an int:
self.assertRaises(PreconditionError, uint_pred, 1.0)
# Test a successful call:
self.assertEqual(0, uint_pred(1))
def test_relational_precondition(self):
@preconditions(lambda a, b: a < b)
def inc_range(a, b):
return range(a, b)
self.assertRaises(PreconditionError, inc_range, 3, 3)
self.assertRaises(PreconditionError, inc_range, 5, 3)
self.assertEqual([3, 4], inc_range(3, 5))
|
from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
Add a basic precondition, and a relational precondition.from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
class BasicPreconditionTests (TestCase):
def test_basic_precondition(self):
@preconditions(lambda i: isinstance(i, int) and i > 0)
def uint_pred(i):
return i-1
# Not greater than 0:
self.assertRaises(PreconditionError, uint_pred, 0)
# Not an int:
self.assertRaises(PreconditionError, uint_pred, 1.0)
# Test a successful call:
self.assertEqual(0, uint_pred(1))
def test_relational_precondition(self):
@preconditions(lambda a, b: a < b)
def inc_range(a, b):
return range(a, b)
self.assertRaises(PreconditionError, inc_range, 3, 3)
self.assertRaises(PreconditionError, inc_range, 5, 3)
self.assertEqual([3, 4], inc_range(3, 5))
|
<commit_before>from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
<commit_msg>Add a basic precondition, and a relational precondition.<commit_after>from unittest import TestCase
from preconditions import PreconditionError, preconditions
class InvalidPreconditionTests (TestCase):
def test_varargs(self):
self.assertRaises(PreconditionError, preconditions, lambda *a: True)
def test_kwargs(self):
self.assertRaises(PreconditionError, preconditions, lambda **kw: True)
def test_unknown_nondefault_param(self):
# The preconditions refer to "x" but are applied to "a, b", so
# "x" is unknown:
p = preconditions(lambda x: True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
def test_default_masks_param(self):
# Preconditions may have defaults as a hack to bind local
# variables (such as when declared syntactically inside loops),
# but this "closure hack" must not mask application function
# parameter names:
p = preconditions(lambda a, b='a stored value': True)
self.assertRaises(PreconditionError, p, lambda a, b: a+b)
class BasicPreconditionTests (TestCase):
def test_basic_precondition(self):
@preconditions(lambda i: isinstance(i, int) and i > 0)
def uint_pred(i):
return i-1
# Not greater than 0:
self.assertRaises(PreconditionError, uint_pred, 0)
# Not an int:
self.assertRaises(PreconditionError, uint_pred, 1.0)
# Test a successful call:
self.assertEqual(0, uint_pred(1))
def test_relational_precondition(self):
@preconditions(lambda a, b: a < b)
def inc_range(a, b):
return range(a, b)
self.assertRaises(PreconditionError, inc_range, 3, 3)
self.assertRaises(PreconditionError, inc_range, 5, 3)
self.assertEqual([3, 4], inc_range(3, 5))
|
a72a7f95af4e8ac03affe5e33bda0a3d57e29fd6
|
examples/connect4/connect4.py
|
examples/connect4/connect4.py
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def check(self, column):
vectors = ((1, 0), (1, 1), (0, 1), (-1, 1))
for i in xrange(4):
row = []
for j in xrange(-3, 4):
try:
if column + j*vectors[i][0] >= 0 and len(self.pieces[column]) - 1 + j*vectors[i][1] >= 0:
row.append(self.pieces[column + j*vectors[i][0]][len(self.pieces[column]) - 1 + j*vectors[i][1]])
else:
row.append(None)
except IndexError:
row.append(None)
for j in xrange(4):
if row[j] == row[j + 1] == row[j + 2] == row[j + 3] is not None:
return row[j]
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return self.check(column)
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
|
Check for winner after every move
|
Check for winner after every move
|
Python
|
mit
|
tysonzero/py-ann
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
Check for winner after every move
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def check(self, column):
vectors = ((1, 0), (1, 1), (0, 1), (-1, 1))
for i in xrange(4):
row = []
for j in xrange(-3, 4):
try:
if column + j*vectors[i][0] >= 0 and len(self.pieces[column]) - 1 + j*vectors[i][1] >= 0:
row.append(self.pieces[column + j*vectors[i][0]][len(self.pieces[column]) - 1 + j*vectors[i][1]])
else:
row.append(None)
except IndexError:
row.append(None)
for j in xrange(4):
if row[j] == row[j + 1] == row[j + 2] == row[j + 3] is not None:
return row[j]
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return self.check(column)
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
|
<commit_before>class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
<commit_msg>Check for winner after every move<commit_after>
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def check(self, column):
vectors = ((1, 0), (1, 1), (0, 1), (-1, 1))
for i in xrange(4):
row = []
for j in xrange(-3, 4):
try:
if column + j*vectors[i][0] >= 0 and len(self.pieces[column]) - 1 + j*vectors[i][1] >= 0:
row.append(self.pieces[column + j*vectors[i][0]][len(self.pieces[column]) - 1 + j*vectors[i][1]])
else:
row.append(None)
except IndexError:
row.append(None)
for j in xrange(4):
if row[j] == row[j + 1] == row[j + 2] == row[j + 3] is not None:
return row[j]
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return self.check(column)
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
Check for winner after every moveclass Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def check(self, column):
vectors = ((1, 0), (1, 1), (0, 1), (-1, 1))
for i in xrange(4):
row = []
for j in xrange(-3, 4):
try:
if column + j*vectors[i][0] >= 0 and len(self.pieces[column]) - 1 + j*vectors[i][1] >= 0:
row.append(self.pieces[column + j*vectors[i][0]][len(self.pieces[column]) - 1 + j*vectors[i][1]])
else:
row.append(None)
except IndexError:
row.append(None)
for j in xrange(4):
if row[j] == row[j + 1] == row[j + 2] == row[j + 3] is not None:
return row[j]
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return self.check(column)
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
|
<commit_before>class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
<commit_msg>Check for winner after every move<commit_after>class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def check(self, column):
vectors = ((1, 0), (1, 1), (0, 1), (-1, 1))
for i in xrange(4):
row = []
for j in xrange(-3, 4):
try:
if column + j*vectors[i][0] >= 0 and len(self.pieces[column]) - 1 + j*vectors[i][1] >= 0:
row.append(self.pieces[column + j*vectors[i][0]][len(self.pieces[column]) - 1 + j*vectors[i][1]])
else:
row.append(None)
except IndexError:
row.append(None)
for j in xrange(4):
if row[j] == row[j + 1] == row[j + 2] == row[j + 3] is not None:
return row[j]
def move(self, column):
for i in xrange(column, column + 7):
if len(self.pieces[i % 7]) < 6:
self.pieces[i % 7].append(self.turn)
self.turn = 1 - self.turn
return self.check(column)
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n|' or '|'
for piece_column in self.pieces:
try:
output += piece_column[5 - i] and 'X|' or 'O|'
except IndexError:
output += ' |'
output += '\n 0 1 2 3 4 5 6 '
return output
def start():
connect4 = Connect4()
while True:
print connect4
connect4.move(column=input('{0}\'s turn: '.format(connect4.turn and 'X' or 'O')))
|
5691238ca1ce78d2a48619c61402681acef9dc7e
|
examples/sequencealignment.py
|
examples/sequencealignment.py
|
from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence("what a beautiful day".split())
b = Sequence("what a disappointingly bad day".split())
print "Sequence A:", a
print "Sequence B:", b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print "Encoded A:", aEncoded
print "Encoded B:", bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print "Alignment score:", alignment.score
print "Percent identity:", alignment.percentIdentity()
print
|
from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence('what a beautiful day'.split())
b = Sequence('what a disappointingly bad day'.split())
print 'Sequence A:', a
print 'Sequence B:', b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print 'Encoded A:', aEncoded
print 'Encoded B:', bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print 'Alignment score:', alignment.score
print 'Percent identity:', alignment.percentIdentity()
print
|
Update the sequence alignment example.
|
Update the sequence alignment example.
|
Python
|
bsd-3-clause
|
eseraygun/python-entities,eseraygun/python-alignment
|
from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence("what a beautiful day".split())
b = Sequence("what a disappointingly bad day".split())
print "Sequence A:", a
print "Sequence B:", b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print "Encoded A:", aEncoded
print "Encoded B:", bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print "Alignment score:", alignment.score
print "Percent identity:", alignment.percentIdentity()
print
Update the sequence alignment example.
|
from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence('what a beautiful day'.split())
b = Sequence('what a disappointingly bad day'.split())
print 'Sequence A:', a
print 'Sequence B:', b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print 'Encoded A:', aEncoded
print 'Encoded B:', bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print 'Alignment score:', alignment.score
print 'Percent identity:', alignment.percentIdentity()
print
|
<commit_before>from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence("what a beautiful day".split())
b = Sequence("what a disappointingly bad day".split())
print "Sequence A:", a
print "Sequence B:", b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print "Encoded A:", aEncoded
print "Encoded B:", bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print "Alignment score:", alignment.score
print "Percent identity:", alignment.percentIdentity()
print
<commit_msg>Update the sequence alignment example.<commit_after>
|
from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence('what a beautiful day'.split())
b = Sequence('what a disappointingly bad day'.split())
print 'Sequence A:', a
print 'Sequence B:', b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print 'Encoded A:', aEncoded
print 'Encoded B:', bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print 'Alignment score:', alignment.score
print 'Percent identity:', alignment.percentIdentity()
print
|
from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence("what a beautiful day".split())
b = Sequence("what a disappointingly bad day".split())
print "Sequence A:", a
print "Sequence B:", b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print "Encoded A:", aEncoded
print "Encoded B:", bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print "Alignment score:", alignment.score
print "Percent identity:", alignment.percentIdentity()
print
Update the sequence alignment example.from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence('what a beautiful day'.split())
b = Sequence('what a disappointingly bad day'.split())
print 'Sequence A:', a
print 'Sequence B:', b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print 'Encoded A:', aEncoded
print 'Encoded B:', bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print 'Alignment score:', alignment.score
print 'Percent identity:', alignment.percentIdentity()
print
|
<commit_before>from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence("what a beautiful day".split())
b = Sequence("what a disappointingly bad day".split())
print "Sequence A:", a
print "Sequence B:", b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print "Encoded A:", aEncoded
print "Encoded B:", bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print "Alignment score:", alignment.score
print "Percent identity:", alignment.percentIdentity()
print
<commit_msg>Update the sequence alignment example.<commit_after>from alignment.sequence import Sequence
from alignment.vocabulary import Vocabulary
from alignment.sequencealigner import SimpleScoring, GlobalSequenceAligner
# Create sequences to be aligned.
a = Sequence('what a beautiful day'.split())
b = Sequence('what a disappointingly bad day'.split())
print 'Sequence A:', a
print 'Sequence B:', b
print
# Create a vocabulary and encode the sequences.
v = Vocabulary()
aEncoded = v.encodeSequence(a)
bEncoded = v.encodeSequence(b)
print 'Encoded A:', aEncoded
print 'Encoded B:', bEncoded
print
# Create a scoring and align the sequences using global aligner.
scoring = SimpleScoring(2, -1)
aligner = GlobalSequenceAligner(scoring, -2)
score, encodeds = aligner.align(aEncoded, bEncoded, backtrace=True)
# Iterate over optimal alignments and print them.
for encoded in encodeds:
alignment = v.decodeSequenceAlignment(encoded)
print alignment
print 'Alignment score:', alignment.score
print 'Percent identity:', alignment.percentIdentity()
print
|
8715324d1c466d617fb832841413025b464b7012
|
onitu/drivers/dropbox/tests/driver.py
|
onitu/drivers/dropbox/tests/driver.py
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.libDropbox import LibDropbox
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.google_drive = LibDrive(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.dropboxDriver import dropboxDriver
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.dropbox = dropboxDriver(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
|
Fix the imports in the tests of dropbox
|
Fix the imports in the tests of dropbox
|
Python
|
mit
|
onitu/onitu,onitu/onitu,onitu/onitu
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.libDropbox import LibDropbox
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.google_drive = LibDrive(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
Fix the imports in the tests of dropbox
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.dropboxDriver import dropboxDriver
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.dropbox = dropboxDriver(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
|
<commit_before>import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.libDropbox import LibDropbox
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.google_drive = LibDrive(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
<commit_msg>Fix the imports in the tests of dropbox<commit_after>
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.dropboxDriver import dropboxDriver
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.dropbox = dropboxDriver(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.libDropbox import LibDropbox
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.google_drive = LibDrive(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
Fix the imports in the tests of dropboximport os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.dropboxDriver import dropboxDriver
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.dropbox = dropboxDriver(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
|
<commit_before>import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.libDropbox import LibDropbox
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.google_drive = LibDrive(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
<commit_msg>Fix the imports in the tests of dropbox<commit_after>import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils.tempdirs import dirs
from onitu.drivers.dropbox.dropboxDriver import dropboxDriver
class Driver(TestDriver):
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
if 'key' not in options:
options['key'] = "38jd72msqedx5n9"
if 'secret' not in options:
options['secret'] = "g4favy0bgjstt2w"
if 'changes_timer' not in options:
options['changes_timer'] = 600.0
self.dropbox = dropboxDriver(options)
super(Driver, self).__init__('dropbox',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
self.drop.delete_file('/')
def mkdir(self, subdirs):
self.drop.create_dir(subdirs+"/toto")
def write(self, filename, content):
metadata = {"size": len(content), "filename": filename}
self.drop.upload_chunk(metadata, 0, content, len(content))
def generate(self, filename, size):
self.write(filename, os.urandom(size))
def unlink(self, filename):
self.drop.delete_file(filename)
def checksum(self, filename):
return "LOL----LOL"
|