commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a7340268cd5bf19d81668595c2cec5e707873737
|
tests/test_objects.py
|
tests/test_objects.py
|
import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
class A(object):
@cached_property
def prop(self):
return 7
a = A()
assert a.prop == 7
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
|
import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
calls = [0]
class A(object):
@cached_property
def prop(self):
calls[0] += 1
return 7
a = A()
assert a.prop == 7
assert a.prop == 7
assert calls == [1]
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
|
Test that @cached_property really caches
|
Test that @cached_property really caches
|
Python
|
bsd-3-clause
|
musicpax/funcy,ma-ric/funcy,Suor/funcy
|
import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
class A(object):
@cached_property
def prop(self):
return 7
a = A()
assert a.prop == 7
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
Test that @cached_property really caches
|
import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
calls = [0]
class A(object):
@cached_property
def prop(self):
calls[0] += 1
return 7
a = A()
assert a.prop == 7
assert a.prop == 7
assert calls == [1]
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
|
<commit_before>import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
class A(object):
@cached_property
def prop(self):
return 7
a = A()
assert a.prop == 7
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
<commit_msg>Test that @cached_property really caches<commit_after>
|
import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
calls = [0]
class A(object):
@cached_property
def prop(self):
calls[0] += 1
return 7
a = A()
assert a.prop == 7
assert a.prop == 7
assert calls == [1]
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
|
import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
class A(object):
@cached_property
def prop(self):
return 7
a = A()
assert a.prop == 7
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
Test that @cached_property really cachesimport sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
calls = [0]
class A(object):
@cached_property
def prop(self):
calls[0] += 1
return 7
a = A()
assert a.prop == 7
assert a.prop == 7
assert calls == [1]
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
|
<commit_before>import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
class A(object):
@cached_property
def prop(self):
return 7
a = A()
assert a.prop == 7
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
<commit_msg>Test that @cached_property really caches<commit_after>import sys
from funcy.objects import *
### @cached_property
def test_set_cached_property():
calls = [0]
class A(object):
@cached_property
def prop(self):
calls[0] += 1
return 7
a = A()
assert a.prop == 7
assert a.prop == 7
assert calls == [1]
a.prop = 42
assert a.prop == 42
### Monkey tests
def test_monkey():
class A(object):
def f(self):
return 7
@monkey(A)
def f(self):
return f.original(self) * 6
assert A().f() == 42
def test_monkey_property():
class A(object):
pass
@monkey(A)
@property
def prop(self):
return 42
assert A().prop == 42
def f(x):
return x
def test_monkey_module():
this_module = sys.modules[__name__]
@monkey(this_module)
def f(x):
return f.original(x) * 2
assert f(21) == 42
|
5a4eca867882299ce3ad8b0cc15c39b4ada61c0a
|
PublicWebServicesAPI_AND_servercommandScripts/addInfoToCSVreport.py
|
PublicWebServicesAPI_AND_servercommandScripts/addInfoToCSVreport.py
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# #TODO Add a note about which report this example works with.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))
csv_reader = reader(stdin, delimiter=',')
line_count = 0
for row in csv_reader:
if line_count == 2:
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes"))
print(", ".join(row))
line_count += 1
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
Add relevant notes and documentation to addInfoToCSVReport.py
|
Update: Add relevant notes and documentation to addInfoToCSVReport.py
|
Python
|
mit
|
PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# #TODO Add a note about which report this example works with.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))
csv_reader = reader(stdin, delimiter=',')
line_count = 0
for row in csv_reader:
if line_count == 2:
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes"))
print(", ".join(row))
line_count += 1
Update: Add relevant notes and documentation to addInfoToCSVReport.py
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
<commit_before>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# #TODO Add a note about which report this example works with.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))
csv_reader = reader(stdin, delimiter=',')
line_count = 0
for row in csv_reader:
if line_count == 2:
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes"))
print(", ".join(row))
line_count += 1
<commit_msg>Update: Add relevant notes and documentation to addInfoToCSVReport.py<commit_after>
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# #TODO Add a note about which report this example works with.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))
csv_reader = reader(stdin, delimiter=',')
line_count = 0
for row in csv_reader:
if line_count == 2:
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes"))
print(", ".join(row))
line_count += 1
Update: Add relevant notes and documentation to addInfoToCSVReport.py#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
<commit_before>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# #TODO Add a note about which report this example works with.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))
csv_reader = reader(stdin, delimiter=',')
line_count = 0
for row in csv_reader:
if line_count == 2:
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes"))
print(", ".join(row))
line_count += 1
<commit_msg>Update: Add relevant notes and documentation to addInfoToCSVReport.py<commit_after>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
587f6c77153235e3defcc6b0b6598634e1ee2828
|
lib/sqlalchemy/dialects/__init__.py
|
lib/sqlalchemy/dialects/__init__.py
|
# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
dialect = "dialect"
except:
module = __import__("sqlalchemy.dialects.firebird").dialects
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
|
# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
except ImportError:
module = __import__("sqlalchemy.dialects.firebird").dialects
module = getattr(module, dialect)
elif dialect == "sybase":
try:
module = __import__("sqlalchemy_sybase")
except ImportError:
module = __import__("sqlalchemy.dialects.sybase").dialects
module = getattr(module, dialect)
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
module = getattr(module, dialect)
except ImportError:
return None
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
|
Load external firebird or sybase dialect if available
|
Load external firebird or sybase dialect if available
Fixes: #5318
Extension of I1660abb11c02656fbf388f2f9c4257075111be58
Change-Id: I32b678430497327f9b08f821bd345a2557e34b1f
|
Python
|
mit
|
monetate/sqlalchemy,j5int/sqlalchemy,j5int/sqlalchemy,zzzeek/sqlalchemy,sqlalchemy/sqlalchemy,monetate/sqlalchemy
|
# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
dialect = "dialect"
except:
module = __import__("sqlalchemy.dialects.firebird").dialects
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
Load external firebird or sybase dialect if available
Fixes: #5318
Extension of I1660abb11c02656fbf388f2f9c4257075111be58
Change-Id: I32b678430497327f9b08f821bd345a2557e34b1f
|
# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
except ImportError:
module = __import__("sqlalchemy.dialects.firebird").dialects
module = getattr(module, dialect)
elif dialect == "sybase":
try:
module = __import__("sqlalchemy_sybase")
except ImportError:
module = __import__("sqlalchemy.dialects.sybase").dialects
module = getattr(module, dialect)
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
module = getattr(module, dialect)
except ImportError:
return None
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
|
<commit_before># dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
dialect = "dialect"
except:
module = __import__("sqlalchemy.dialects.firebird").dialects
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
<commit_msg>Load external firebird or sybase dialect if available
Fixes: #5318
Extension of I1660abb11c02656fbf388f2f9c4257075111be58
Change-Id: I32b678430497327f9b08f821bd345a2557e34b1f<commit_after>
|
# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
except ImportError:
module = __import__("sqlalchemy.dialects.firebird").dialects
module = getattr(module, dialect)
elif dialect == "sybase":
try:
module = __import__("sqlalchemy_sybase")
except ImportError:
module = __import__("sqlalchemy.dialects.sybase").dialects
module = getattr(module, dialect)
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
module = getattr(module, dialect)
except ImportError:
return None
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
|
# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
dialect = "dialect"
except:
module = __import__("sqlalchemy.dialects.firebird").dialects
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
Load external firebird or sybase dialect if available
Fixes: #5318
Extension of I1660abb11c02656fbf388f2f9c4257075111be58
Change-Id: I32b678430497327f9b08f821bd345a2557e34b1f# dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
except ImportError:
module = __import__("sqlalchemy.dialects.firebird").dialects
module = getattr(module, dialect)
elif dialect == "sybase":
try:
module = __import__("sqlalchemy_sybase")
except ImportError:
module = __import__("sqlalchemy.dialects.sybase").dialects
module = getattr(module, dialect)
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
module = getattr(module, dialect)
except ImportError:
return None
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
|
<commit_before># dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
dialect = "dialect"
except:
module = __import__("sqlalchemy.dialects.firebird").dialects
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
<commit_msg>Load external firebird or sybase dialect if available
Fixes: #5318
Extension of I1660abb11c02656fbf388f2f9c4257075111be58
Change-Id: I32b678430497327f9b08f821bd345a2557e34b1f<commit_after># dialects/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
"firebird",
"mssql",
"mysql",
"oracle",
"postgresql",
"sqlite",
"sybase",
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
if dialect == "firebird":
try:
module = __import__("sqlalchemy_firebird")
except ImportError:
module = __import__("sqlalchemy.dialects.firebird").dialects
module = getattr(module, dialect)
elif dialect == "sybase":
try:
module = __import__("sqlalchemy_sybase")
except ImportError:
module = __import__("sqlalchemy.dialects.sybase").dialects
module = getattr(module, dialect)
else:
module = __import__("sqlalchemy.dialects.%s" % (dialect,)).dialects
module = getattr(module, dialect)
except ImportError:
return None
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
plugins = util.PluginLoader("sqlalchemy.plugins")
|
f4c01d85eb5a3873ea80e24b3dae50bd3ab87f4a
|
llvmlite/binding/linker.py
|
llvmlite/binding/linker.py
|
from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
dst.verify()
src.verify()
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
|
from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
|
Remove unnecessary verify. We should let user run the verification instead of doing it in linkage.
|
Remove unnecessary verify.
We should let user run the verification instead of doing it in linkage.
|
Python
|
bsd-2-clause
|
numba/llvmlite,numba/llvmlite,numba/llvmlite,numba/llvmlite
|
from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
dst.verify()
src.verify()
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
Remove unnecessary verify.
We should let user run the verification instead of doing it in linkage.
|
from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
|
<commit_before>from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
dst.verify()
src.verify()
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
<commit_msg>Remove unnecessary verify.
We should let user run the verification instead of doing it in linkage.<commit_after>
|
from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
|
from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
dst.verify()
src.verify()
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
Remove unnecessary verify.
We should let user run the verification instead of doing it in linkage.from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
|
<commit_before>from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
dst.verify()
src.verify()
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
<commit_msg>Remove unnecessary verify.
We should let user run the verification instead of doing it in linkage.<commit_after>from __future__ import print_function, absolute_import
from ctypes import c_int, c_char_p, POINTER
from . import ffi
def link_modules(dst, src):
with ffi.OutputString() as outerr:
err = ffi.lib.LLVMPY_LinkModules(dst, src, outerr)
# The underlying module was destroyed
src.detach()
if err:
raise RuntimeError(str(outerr))
ffi.lib.LLVMPY_LinkModules.argtypes = [
ffi.LLVMModuleRef,
ffi.LLVMModuleRef,
POINTER(c_char_p),
]
ffi.lib.LLVMPY_LinkModules.restype = c_int
|
60b039aabb94c1e5a50bb19bb7267a0fd3ceaa86
|
mollie/api/objects/list.py
|
mollie/api/objects/list.py
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
|
Drop obsoleted support for offset.
|
Drop obsoleted support for offset.
|
Python
|
bsd-2-clause
|
mollie/mollie-api-python
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
Drop obsoleted support for offset.
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
|
<commit_before>from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
<commit_msg>Drop obsoleted support for offset.<commit_after>
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
Drop obsoleted support for offset.from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
|
<commit_before>from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
<commit_msg>Drop obsoleted support for offset.<commit_after>from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def __len__(self):
"""Return the count field."""
return int(self['count'])
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator interface."""
self.current = None
return self
def __next__(self):
"""Implement iterator interface."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
|
ba211a0037aa26d5d1fc9cb7a0de55a46b481a82
|
prometapi/bicikeljproxy/management/commands/bicikelj_fetch_citybikes.py
|
prometapi/bicikeljproxy/management/commands/bicikelj_fetch_citybikes.py
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
print 'fetch', 'http://api.citybik.es/bicikelj.json'
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
print [foreign_data]
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
|
Print data when citybikes throws error.
|
Print data when citybikes throws error.
|
Python
|
agpl-3.0
|
zejn/prometapi,izacus/prometapi,zejn/prometapi,izacus/prometapi
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
Print data when citybikes throws error.
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
print 'fetch', 'http://api.citybik.es/bicikelj.json'
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
print [foreign_data]
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
<commit_msg>Print data when citybikes throws error.<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
print 'fetch', 'http://api.citybik.es/bicikelj.json'
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
print [foreign_data]
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
Print data when citybikes throws error.from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
print 'fetch', 'http://api.citybik.es/bicikelj.json'
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
print [foreign_data]
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
<commit_msg>Print data when citybikes throws error.<commit_after>from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
import os
import sys
class Command(BaseCommand):
help = 'Fetch bicikelj XMLs and store them in order not to storm on official servers'
def handle(self, *args, **options):
from prometapi.bicikeljproxy.models import fetch_xmls, BicikeljData, convert_citybikes
import simplejson
import datetime
import urllib
#timestamp, data = fetch_xmls()
print 'fetch', 'http://api.citybik.es/bicikelj.json'
foreign_data = urllib.urlopen('http://api.citybik.es/bicikelj.json').read()
print [foreign_data]
data = convert_citybikes(foreign_data)
timestamp = datetime.datetime.now()
b = BicikeljData(timestamp=timestamp, json_data=simplejson.dumps(data))
b.save()
|
fbbee24a71f840131748bc8ca1cadc7759c58d52
|
molo/core/content_import/api/urls.py
|
molo/core/content_import/api/urls.py
|
from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ChooseParentView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]
|
from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ArticleChooserView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]
|
Fix name of Aritlce Parent Chooser view as used by the URLs
|
Fix name of Aritlce Parent Chooser view as used by the URLs
|
Python
|
bsd-2-clause
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ChooseParentView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]Fix name of Aritlce Parent Chooser view as used by the URLs
|
from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ArticleChooserView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]
|
<commit_before>from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ChooseParentView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]<commit_msg>Fix name of Aritlce Parent Chooser view as used by the URLs<commit_after>
|
from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ArticleChooserView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]
|
from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ChooseParentView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]Fix name of Aritlce Parent Chooser view as used by the URLsfrom django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ArticleChooserView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]
|
<commit_before>from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ChooseParentView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]<commit_msg>Fix name of Aritlce Parent Chooser view as used by the URLs<commit_after>from django.conf.urls import url
from molo.core.content_import.api import admin_views
urlpatterns = [
url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
url(r"^parent/$", admin_views.ArticleChooserView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
]
|
ce97f7677a84db351e3d2cadf01691fc879a8fbe
|
profile/files/applications/report/fedora/check_updates.py
|
profile/files/applications/report/fedora/check_updates.py
|
dnf_failure = False
base = dnf.Base()
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
|
dnf_failure = False
base = dnf.Base()
base.conf.substitutions.update_from_etc("/")
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
|
Read custom variables (Required to support CentOS Stream)
|
Read custom variables (Required to support CentOS Stream)
|
Python
|
apache-2.0
|
norcams/himlar,tanzr/himlar,raykrist/himlar,norcams/himlar,norcams/himlar,TorLdre/himlar,tanzr/himlar,tanzr/himlar,raykrist/himlar,TorLdre/himlar,norcams/himlar,TorLdre/himlar,raykrist/himlar,mikaeld66/himlar,mikaeld66/himlar,mikaeld66/himlar,tanzr/himlar,TorLdre/himlar,raykrist/himlar,tanzr/himlar,norcams/himlar,raykrist/himlar,mikaeld66/himlar,mikaeld66/himlar,TorLdre/himlar
|
dnf_failure = False
base = dnf.Base()
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
Read custom variables (Required to support CentOS Stream)
|
dnf_failure = False
base = dnf.Base()
base.conf.substitutions.update_from_etc("/")
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
|
<commit_before>dnf_failure = False
base = dnf.Base()
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
<commit_msg>Read custom variables (Required to support CentOS Stream)<commit_after>
|
dnf_failure = False
base = dnf.Base()
base.conf.substitutions.update_from_etc("/")
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
|
dnf_failure = False
base = dnf.Base()
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
Read custom variables (Required to support CentOS Stream)dnf_failure = False
base = dnf.Base()
base.conf.substitutions.update_from_etc("/")
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
|
<commit_before>dnf_failure = False
base = dnf.Base()
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
<commit_msg>Read custom variables (Required to support CentOS Stream)<commit_after>dnf_failure = False
base = dnf.Base()
base.conf.substitutions.update_from_etc("/")
try:
base.read_all_repos()
base.fill_sack()
upgrades = base.sack.query().upgrades().run()
except:
dnf_failure = True
if dnf_failure:
pkg_output = -1
else:
pkg_output = len(upgrades)
|
56af29b28ff236c4380a11e4c498ae2c61917e62
|
tests/test_level_standards.py
|
tests/test_level_standards.py
|
import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 8
assert logging.VERBOSE == 9
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
|
import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 5
assert logging.VERBOSE == 7
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
|
Fix standards tests for log levels
|
Fix standards tests for log levels
|
Python
|
mit
|
six8/logging-levels
|
import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 8
assert logging.VERBOSE == 9
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
Fix standards tests for log levels
|
import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 5
assert logging.VERBOSE == 7
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
|
<commit_before>import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 8
assert logging.VERBOSE == 9
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
<commit_msg>Fix standards tests for log levels<commit_after>
|
import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 5
assert logging.VERBOSE == 7
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
|
import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 8
assert logging.VERBOSE == 9
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
Fix standards tests for log levelsimport sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 5
assert logging.VERBOSE == 7
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
|
<commit_before>import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 8
assert logging.VERBOSE == 9
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
<commit_msg>Fix standards tests for log levels<commit_after>import sys
def test_level_standards(logging, log):
"""
Ensure that the standard log levels work
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.TRACE == 5
assert logging.VERBOSE == 7
log.verbose("I've said too much")
assert log.last() == ['VERBOSE', "I've said too much"]
log.trace("But I haven't said enough")
assert log.last() == ['TRACE', "But I haven't said enough"]
def test_standards_suppressed(logging, log):
"""
Ensure that the suppressed log level includes
the suppressed exception
"""
import logging_levels.standards
del sys.modules['logging_levels.standards'] # Force module to re-import
assert logging.SUPPRESSED
try:
raise Exception('Suppress this')
except:
log.suppressed('Suppressed exception')
lines = ''.join(log.readlines())
assert lines.startswith('SUPPRESSED:')
assert 'Exception: Suppress this' in lines
|
5085e2f8c97ecab6617b4f7b0c8250095d47b22d
|
boardinghouse/templatetags/boardinghouse.py
|
boardinghouse/templatetags/boardinghouse.py
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
Remove a database access from the template tag.
|
Remove a database access from the template tag.
--HG--
branch : schema-invitations
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
Remove a database access from the template tag.
--HG--
branch : schema-invitations
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
<commit_before>from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
<commit_msg>Remove a database access from the template tag.
--HG--
branch : schema-invitations<commit_after>
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
Remove a database access from the template tag.
--HG--
branch : schema-invitationsfrom django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
<commit_before>from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import get_schema_model
Schema = get_schema_model()
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(pk):
try:
return Schema.objects.get(pk=pk).name
except Schema.DoesNotExist:
return "no schema"
<commit_msg>Remove a database access from the template tag.
--HG--
branch : schema-invitations<commit_after>from django import template
from ..schema import is_shared_model as _is_shared_model
from ..schema import _get_schema
register = template.Library()
@register.filter
def is_schema_aware(obj):
return obj and not _is_shared_model(obj)
@register.filter
def is_shared_model(obj):
return obj and _is_shared_model(obj)
@register.filter
def schema_name(schema):
try:
return _get_schema(schema).name
except AttributeError:
return "no schema"
|
b2b6874e044b6984f8b0a300963ff340df62abc9
|
ycml/transformers/base.py
|
ycml/transformers/base.py
|
import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, y=None):
timer = Timer()
transformed = self._transform(X, y)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
|
import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, **kwargs):
timer = Timer()
transformed = self._transform(X, **kwargs)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
|
Add `**kwargs` support to transformers
|
Add `**kwargs` support to transformers
|
Python
|
apache-2.0
|
skylander86/ycml
|
import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, y=None):
timer = Timer()
transformed = self._transform(X, y)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
Add `**kwargs` support to transformers
|
import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, **kwargs):
timer = Timer()
transformed = self._transform(X, **kwargs)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
|
<commit_before>import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, y=None):
timer = Timer()
transformed = self._transform(X, y)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
<commit_msg>Add `**kwargs` support to transformers<commit_after>
|
import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, **kwargs):
timer = Timer()
transformed = self._transform(X, **kwargs)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
|
import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, y=None):
timer = Timer()
transformed = self._transform(X, y)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
Add `**kwargs` support to transformersimport logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, **kwargs):
timer = Timer()
transformed = self._transform(X, **kwargs)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
|
<commit_before>import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, y=None):
timer = Timer()
transformed = self._transform(X, y)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
<commit_msg>Add `**kwargs` support to transformers<commit_after>import logging
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from ..utils import Timer
__all__ = ['PureTransformer', 'identity']
logger = logging.getLogger(__name__)
# Helper class. A transformer that only does transformation and does not need to fit any internal parameters.
class PureTransformer(BaseEstimator, TransformerMixin):
def __init__(self, nparray=True, **kwargs):
super(PureTransformer, self).__init__(**kwargs)
self.nparray = nparray
#end def
def fit(self, X, y=None, **fit_params): return self
def transform(self, X, **kwargs):
timer = Timer()
transformed = self._transform(X, **kwargs)
if self.nparray: transformed = np.array(transformed)
logger.debug('Done <{}> transformation{}.'.format(type(self).__name__, timer))
return transformed
#end def
def _transform(self, X, y=None):
return [self.transform_one(row) for row in X]
#end def
def transform_one(self, x):
raise NotImplementedError('transform_one method needs to be implemented.')
#end class
def identity(x): return x
|
538ae3b96399e207e38bdf53bdd1c8f738b82e33
|
tests/test_pagination.py
|
tests/test_pagination.py
|
from hn import HN
hn = HN()
def test_pagination_top():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 pages.
"""
assert len(hn.get_stories(page_limit=2)) == 2 * 30
def test_pagination_newest():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
assert len(hn.get_stories(story_type='newest', page_limit=3)) == 3 * 30
def test_pagination_best():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 5 pages.
"""
assert len(hn.get_stories(story_type='best', page_limit=5)) == 5 * 30
|
from hn import HN
hn = HN()
def test_pagination_top_for_2_pages():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 page.
"""
stories = hn.get_stories(page_limit=2)
assert len(stories) == 2 * 30
def test_pagination_newest_for_3_pages():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
stories = hn.get_stories(story_type='newest', page_limit=3)
assert len(stories) == 3 * 30
def test_pagination_best_for_5_pages():
"""
This test checks if the pagination works for the best stories by comparing
number of stories in 5 pages.
"""
stories = hn.get_stories(story_type='best', page_limit=5)
assert len(stories) == 5 * 30
def test_pagination_top_for_0_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 0 page.
"""
stories = hn.get_stories(page_limit=0)
assert len(stories) == 1 * 30
def test_pagination_top_for_negative_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in negative page.
"""
stories = hn.get_stories(page_limit=-10)
assert len(stories) == 1 * 30
|
Add test cases for unexpected page_limit
|
Add test cases for unexpected page_limit
|
Python
|
mit
|
brunocappelli/HackerNewsAPI,karan/HackerNewsAPI,brunocappelli/HackerNewsAPI,karan/HackerNewsAPI,brunocappelli/HackerNewsAPI
|
from hn import HN
hn = HN()
def test_pagination_top():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 pages.
"""
assert len(hn.get_stories(page_limit=2)) == 2 * 30
def test_pagination_newest():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
assert len(hn.get_stories(story_type='newest', page_limit=3)) == 3 * 30
def test_pagination_best():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 5 pages.
"""
assert len(hn.get_stories(story_type='best', page_limit=5)) == 5 * 30
Add test cases for unexpected page_limit
|
from hn import HN
hn = HN()
def test_pagination_top_for_2_pages():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 page.
"""
stories = hn.get_stories(page_limit=2)
assert len(stories) == 2 * 30
def test_pagination_newest_for_3_pages():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
stories = hn.get_stories(story_type='newest', page_limit=3)
assert len(stories) == 3 * 30
def test_pagination_best_for_5_pages():
"""
This test checks if the pagination works for the best stories by comparing
number of stories in 5 pages.
"""
stories = hn.get_stories(story_type='best', page_limit=5)
assert len(stories) == 5 * 30
def test_pagination_top_for_0_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 0 page.
"""
stories = hn.get_stories(page_limit=0)
assert len(stories) == 1 * 30
def test_pagination_top_for_negative_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in negative page.
"""
stories = hn.get_stories(page_limit=-10)
assert len(stories) == 1 * 30
|
<commit_before>from hn import HN
hn = HN()
def test_pagination_top():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 pages.
"""
assert len(hn.get_stories(page_limit=2)) == 2 * 30
def test_pagination_newest():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
assert len(hn.get_stories(story_type='newest', page_limit=3)) == 3 * 30
def test_pagination_best():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 5 pages.
"""
assert len(hn.get_stories(story_type='best', page_limit=5)) == 5 * 30
<commit_msg>Add test cases for unexpected page_limit<commit_after>
|
from hn import HN
hn = HN()
def test_pagination_top_for_2_pages():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 page.
"""
stories = hn.get_stories(page_limit=2)
assert len(stories) == 2 * 30
def test_pagination_newest_for_3_pages():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
stories = hn.get_stories(story_type='newest', page_limit=3)
assert len(stories) == 3 * 30
def test_pagination_best_for_5_pages():
"""
This test checks if the pagination works for the best stories by comparing
number of stories in 5 pages.
"""
stories = hn.get_stories(story_type='best', page_limit=5)
assert len(stories) == 5 * 30
def test_pagination_top_for_0_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 0 page.
"""
stories = hn.get_stories(page_limit=0)
assert len(stories) == 1 * 30
def test_pagination_top_for_negative_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in negative page.
"""
stories = hn.get_stories(page_limit=-10)
assert len(stories) == 1 * 30
|
from hn import HN
hn = HN()
def test_pagination_top():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 pages.
"""
assert len(hn.get_stories(page_limit=2)) == 2 * 30
def test_pagination_newest():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
assert len(hn.get_stories(story_type='newest', page_limit=3)) == 3 * 30
def test_pagination_best():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 5 pages.
"""
assert len(hn.get_stories(story_type='best', page_limit=5)) == 5 * 30
Add test cases for unexpected page_limitfrom hn import HN
hn = HN()
def test_pagination_top_for_2_pages():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 page.
"""
stories = hn.get_stories(page_limit=2)
assert len(stories) == 2 * 30
def test_pagination_newest_for_3_pages():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
stories = hn.get_stories(story_type='newest', page_limit=3)
assert len(stories) == 3 * 30
def test_pagination_best_for_5_pages():
"""
This test checks if the pagination works for the best stories by comparing
number of stories in 5 pages.
"""
stories = hn.get_stories(story_type='best', page_limit=5)
assert len(stories) == 5 * 30
def test_pagination_top_for_0_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 0 page.
"""
stories = hn.get_stories(page_limit=0)
assert len(stories) == 1 * 30
def test_pagination_top_for_negative_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in negative page.
"""
stories = hn.get_stories(page_limit=-10)
assert len(stories) == 1 * 30
|
<commit_before>from hn import HN
hn = HN()
def test_pagination_top():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 pages.
"""
assert len(hn.get_stories(page_limit=2)) == 2 * 30
def test_pagination_newest():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
assert len(hn.get_stories(story_type='newest', page_limit=3)) == 3 * 30
def test_pagination_best():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 5 pages.
"""
assert len(hn.get_stories(story_type='best', page_limit=5)) == 5 * 30
<commit_msg>Add test cases for unexpected page_limit<commit_after>from hn import HN
hn = HN()
def test_pagination_top_for_2_pages():
"""
This test checks if the pagination works for the front page by comparing
number of stories in 2 page.
"""
stories = hn.get_stories(page_limit=2)
assert len(stories) == 2 * 30
def test_pagination_newest_for_3_pages():
"""
This test checks if the pagination works for the best page by comparing
number of stories in 3 pages.
"""
stories = hn.get_stories(story_type='newest', page_limit=3)
assert len(stories) == 3 * 30
def test_pagination_best_for_5_pages():
"""
This test checks if the pagination works for the best stories by comparing
number of stories in 5 pages.
"""
stories = hn.get_stories(story_type='best', page_limit=5)
assert len(stories) == 5 * 30
def test_pagination_top_for_0_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in 0 page.
"""
stories = hn.get_stories(page_limit=0)
assert len(stories) == 1 * 30
def test_pagination_top_for_negative_pages():
"""
This test checks if the pagination works for the top stories by comparing
number of stories in negative page.
"""
stories = hn.get_stories(page_limit=-10)
assert len(stories) == 1 * 30
|
7d894c2faa2d9dfac8eec5389ecb500a8f5f8e63
|
bin/pymodules/apitest/jscomponent.py
|
bin/pymodules/apitest/jscomponent.py
|
import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
|
import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
try:
ent.placeable
except:
pass
else:
ctx['placeable'] = ent.placeable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
|
Add placeable to javascript context
|
Add placeable to javascript context
|
Python
|
apache-2.0
|
realXtend/tundra,antont/tundra,realXtend/tundra,AlphaStaxLLC/tundra,AlphaStaxLLC/tundra,pharos3d/tundra,AlphaStaxLLC/tundra,pharos3d/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,jesterKing/naali,pharos3d/tundra,antont/tundra,BogusCurry/tundra,BogusCurry/tundra,realXtend/tundra,jesterKing/naali,antont/tundra,antont/tundra,jesterKing/naali,jesterKing/naali,antont/tundra,BogusCurry/tundra,jesterKing/naali,pharos3d/tundra,realXtend/tundra,AlphaStaxLLC/tundra,realXtend/tundra,antont/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,BogusCurry/tundra,pharos3d/tundra,pharos3d/tundra,realXtend/tundra,jesterKing/naali,jesterKing/naali,antont/tundra
|
import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
Add placeable to javascript context
|
import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
try:
ent.placeable
except:
pass
else:
ctx['placeable'] = ent.placeable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
|
<commit_before>import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
<commit_msg>Add placeable to javascript context<commit_after>
|
import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
try:
ent.placeable
except:
pass
else:
ctx['placeable'] = ent.placeable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
|
import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
Add placeable to javascript contextimport json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
try:
ent.placeable
except:
pass
else:
ctx['placeable'] = ent.placeable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
|
<commit_before>import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
<commit_msg>Add placeable to javascript context<commit_after>import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
try:
ent.placeable
except:
pass
else:
ctx['placeable'] = ent.placeable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
|
4c922ea8e2ebd6e2ffb001f4733fcf7fa5edc250
|
shuup_workbench/wsgi.py
|
shuup_workbench/wsgi.py
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings")
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings") # noqa
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
|
Mark env setter line as noqa
|
workbench: Mark env setter line as noqa
|
Python
|
agpl-3.0
|
shoopio/shoop,suutari-ai/shoop,shoopio/shoop,shoopio/shoop,suutari-ai/shoop,suutari-ai/shoop
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings")
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
workbench: Mark env setter line as noqa
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings") # noqa
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
|
<commit_before># This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings")
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
<commit_msg>workbench: Mark env setter line as noqa<commit_after>
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings") # noqa
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings")
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
workbench: Mark env setter line as noqa# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings") # noqa
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
|
<commit_before># This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings")
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
<commit_msg>workbench: Mark env setter line as noqa<commit_after># This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
"""
WSGI config for shuup_workbench project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "shuup_workbench.settings") # noqa
from django.core.wsgi import get_wsgi_application # noqa (E402)
application = get_wsgi_application()
|
92221186166c0d7d6a593eb38fb4b0845c23634b
|
eue.py
|
eue.py
|
#!/usr/bin/env python
import os
import sys
from bottle import route,run,template,static_file
#from lib import mydb
base=os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
def getDataStructure():
return {
"title" : "",
"page" : "login.tpl",
"nav" : True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename,root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index',page='index',data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
return template('login',page='login',data=data)
run(host='localhost',port=8080,reloader=True,debug=True)
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import sys
from bottle import route, run, template, static_file, get, post
from lib import eueauth
def getDataStructure():
""" initialize a default dict passed to templates """
return {
"title": "",
"page": "login.tpl",
"nav": True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename, root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index', page='index', data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
user = request.forms.get("user")
password = request.forms.get("password")
return template('login', page='login', data=data)
if __name__ == '__main__':
base = os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
run(host='localhost', port=8080, reloader=True, debug=True)
|
Fix module import problem, make code more pep compliant, add utf8 encoding
|
Fix module import problem, make code more pep compliant, add utf8 encoding
|
Python
|
agpl-3.0
|
david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng,david-guenault/eue-ng
|
#!/usr/bin/env python
import os
import sys
from bottle import route,run,template,static_file
#from lib import mydb
base=os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
def getDataStructure():
return {
"title" : "",
"page" : "login.tpl",
"nav" : True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename,root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index',page='index',data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
return template('login',page='login',data=data)
run(host='localhost',port=8080,reloader=True,debug=True)Fix module import problem, make code more pep compliant, add utf8 encoding
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import sys
from bottle import route, run, template, static_file, get, post
from lib import eueauth
def getDataStructure():
""" initialize a default dict passed to templates """
return {
"title": "",
"page": "login.tpl",
"nav": True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename, root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index', page='index', data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
user = request.forms.get("user")
password = request.forms.get("password")
return template('login', page='login', data=data)
if __name__ == '__main__':
base = os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
run(host='localhost', port=8080, reloader=True, debug=True)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from bottle import route,run,template,static_file
#from lib import mydb
base=os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
def getDataStructure():
return {
"title" : "",
"page" : "login.tpl",
"nav" : True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename,root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index',page='index',data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
return template('login',page='login',data=data)
run(host='localhost',port=8080,reloader=True,debug=True)<commit_msg>Fix module import problem, make code more pep compliant, add utf8 encoding<commit_after>
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import sys
from bottle import route, run, template, static_file, get, post
from lib import eueauth
def getDataStructure():
""" initialize a default dict passed to templates """
return {
"title": "",
"page": "login.tpl",
"nav": True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename, root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index', page='index', data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
user = request.forms.get("user")
password = request.forms.get("password")
return template('login', page='login', data=data)
if __name__ == '__main__':
base = os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
run(host='localhost', port=8080, reloader=True, debug=True)
|
#!/usr/bin/env python
import os
import sys
from bottle import route,run,template,static_file
#from lib import mydb
base=os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
def getDataStructure():
return {
"title" : "",
"page" : "login.tpl",
"nav" : True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename,root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index',page='index',data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
return template('login',page='login',data=data)
run(host='localhost',port=8080,reloader=True,debug=True)Fix module import problem, make code more pep compliant, add utf8 encoding#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import sys
from bottle import route, run, template, static_file, get, post
from lib import eueauth
def getDataStructure():
""" initialize a default dict passed to templates """
return {
"title": "",
"page": "login.tpl",
"nav": True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename, root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index', page='index', data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
user = request.forms.get("user")
password = request.forms.get("password")
return template('login', page='login', data=data)
if __name__ == '__main__':
base = os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
run(host='localhost', port=8080, reloader=True, debug=True)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from bottle import route,run,template,static_file
#from lib import mydb
base=os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
def getDataStructure():
return {
"title" : "",
"page" : "login.tpl",
"nav" : True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename,root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index',page='index',data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
return template('login',page='login',data=data)
run(host='localhost',port=8080,reloader=True,debug=True)<commit_msg>Fix module import problem, make code more pep compliant, add utf8 encoding<commit_after>#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import sys
from bottle import route, run, template, static_file, get, post
from lib import eueauth
def getDataStructure():
""" initialize a default dict passed to templates """
return {
"title": "",
"page": "login.tpl",
"nav": True
}
@route('/static/<filename:path>')
def static(filename):
""" return static files """
return static_file(filename, root="%s/static" % (base))
@route('/')
def index():
""" index page """
data = getDataStructure()
return template('index', page='index', data=data)
@route('/login')
def login():
""" authentication page """
data = getDataStructure()
data["nav"] = False
user = request.forms.get("user")
password = request.forms.get("password")
return template('login', page='login', data=data)
if __name__ == '__main__':
base = os.path.dirname(os.path.realpath(__file__))
dom0s = []
capacity = []
run(host='localhost', port=8080, reloader=True, debug=True)
|
4e66e9ff016ffc392caf4edb5735b77f518ba2b4
|
alignak_backend/models/uipref.py
|
alignak_backend/models/uipref.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
}
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
Update UI preferences model (list)
|
Update UI preferences model (list)
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
}
}
Update UI preferences model (list)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
}
}
<commit_msg>Update UI preferences model (list)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
}
}
Update UI preferences model (list)#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
}
}
<commit_msg>Update UI preferences model (list)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
|
901a45b36f58312dd1a12c6f48a472cf73e4482c
|
model/sample/__init__.py
|
model/sample/__init__.py
|
# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
debug_only=True
)
|
# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
init_context=user.init_context,
debug_only=True
)
|
Add missing init_context to sample divison
|
Add missing init_context to sample divison
|
Python
|
mit
|
MarauderXtreme/sipa,agdsn/sipa,lukasjuhrich/sipa,fgrsnau/sipa,MarauderXtreme/sipa,agdsn/sipa,fgrsnau/sipa,agdsn/sipa,fgrsnau/sipa,lukasjuhrich/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa
|
# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
debug_only=True
)
Add missing init_context to sample divison
|
# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
init_context=user.init_context,
debug_only=True
)
|
<commit_before># -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
debug_only=True
)
<commit_msg>Add missing init_context to sample divison<commit_after>
|
# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
init_context=user.init_context,
debug_only=True
)
|
# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
debug_only=True
)
Add missing init_context to sample divison# -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
init_context=user.init_context,
debug_only=True
)
|
<commit_before># -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
debug_only=True
)
<commit_msg>Add missing init_context to sample divison<commit_after># -*- coding: utf-8 -*-
from flask.ext.babel import gettext
from ..division import Division
import user
division = Division(
name='sample',
display_name=gettext("Beispielsektion"),
user_class=user.User,
init_context=user.init_context,
debug_only=True
)
|
c80fc3c31003e6ecec049ac2e1ca370e58ab2b3c
|
mediasync/processors/yuicompressor.py
|
mediasync/processors/yuicompressor.py
|
from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
Replace incorrect JS_MIMETYPES with CSS_MIMETYPES
|
Replace incorrect JS_MIMETYPES with CSS_MIMETYPES
|
Python
|
bsd-3-clause
|
sunlightlabs/django-mediasync,mntan/django-mediasync,mntan/django-mediasync,sunlightlabs/django-mediasync,sunlightlabs/django-mediasync,mntan/django-mediasync
|
from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
Replace incorrect JS_MIMETYPES with CSS_MIMETYPES
|
from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
<commit_before>from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
<commit_msg>Replace incorrect JS_MIMETYPES with CSS_MIMETYPES<commit_after>
|
from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
Replace incorrect JS_MIMETYPES with CSS_MIMETYPESfrom django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
<commit_before>from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
<commit_msg>Replace incorrect JS_MIMETYPES with CSS_MIMETYPES<commit_after>from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
a3241e33f189cee4f7b5955e880ca3cc18e2694f
|
before_after_filesystem_snapshot.py
|
before_after_filesystem_snapshot.py
|
def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
|
def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
def generate_artifact_rules(snapshot):
'''
Generate Artifact Rules given which files have been added, which have been
removed, which have been modified, and which have remained unchanged.
'''
expected_materials = []
expected_products = []
# TODO: missing rules for MATCH since we don't have the information of the
# material from the previous step
for file in snapshot[0]:
# unchanged files
expected_materials.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_materials.append(["ALLOW", file])
for file in snapshot[3]:
# removed files
expected_materials.append(["DELETE", file])
expected_materials.append(["DISALLOW", "*"])
for file in snapshot[0]:
# unchanged files
expected_products.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_products.append(["MODIFY", file])
for file in snapshot[2]:
# added files
expected_products.append(["CREATE", file])
expected_products.append(["DISALLOW", "*"])
return {
'expected_materials': expected_materials,
'expected_products': expected_products
}
|
Add function to generate artifact rules.
|
Add function to generate artifact rules.
Adds a function to before_after_filesystem_snapshot.py called
generate_artifact_rules.
|
Python
|
mit
|
in-toto/layout-web-tool,in-toto/layout-web-tool,in-toto/layout-web-tool
|
def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
Add function to generate artifact rules.
Adds a function to before_after_filesystem_snapshot.py called
generate_artifact_rules.
|
def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
def generate_artifact_rules(snapshot):
'''
Generate Artifact Rules given which files have been added, which have been
removed, which have been modified, and which have remained unchanged.
'''
expected_materials = []
expected_products = []
# TODO: missing rules for MATCH since we don't have the information of the
# material from the previous step
for file in snapshot[0]:
# unchanged files
expected_materials.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_materials.append(["ALLOW", file])
for file in snapshot[3]:
# removed files
expected_materials.append(["DELETE", file])
expected_materials.append(["DISALLOW", "*"])
for file in snapshot[0]:
# unchanged files
expected_products.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_products.append(["MODIFY", file])
for file in snapshot[2]:
# added files
expected_products.append(["CREATE", file])
expected_products.append(["DISALLOW", "*"])
return {
'expected_materials': expected_materials,
'expected_products': expected_products
}
|
<commit_before>def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
<commit_msg>Add function to generate artifact rules.
Adds a function to before_after_filesystem_snapshot.py called
generate_artifact_rules.<commit_after>
|
def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
def generate_artifact_rules(snapshot):
'''
Generate Artifact Rules given which files have been added, which have been
removed, which have been modified, and which have remained unchanged.
'''
expected_materials = []
expected_products = []
# TODO: missing rules for MATCH since we don't have the information of the
# material from the previous step
for file in snapshot[0]:
# unchanged files
expected_materials.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_materials.append(["ALLOW", file])
for file in snapshot[3]:
# removed files
expected_materials.append(["DELETE", file])
expected_materials.append(["DISALLOW", "*"])
for file in snapshot[0]:
# unchanged files
expected_products.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_products.append(["MODIFY", file])
for file in snapshot[2]:
# added files
expected_products.append(["CREATE", file])
expected_products.append(["DISALLOW", "*"])
return {
'expected_materials': expected_materials,
'expected_products': expected_products
}
|
def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
Add function to generate artifact rules.
Adds a function to before_after_filesystem_snapshot.py called
generate_artifact_rules.def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
def generate_artifact_rules(snapshot):
'''
Generate Artifact Rules given which files have been added, which have been
removed, which have been modified, and which have remained unchanged.
'''
expected_materials = []
expected_products = []
# TODO: missing rules for MATCH since we don't have the information of the
# material from the previous step
for file in snapshot[0]:
# unchanged files
expected_materials.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_materials.append(["ALLOW", file])
for file in snapshot[3]:
# removed files
expected_materials.append(["DELETE", file])
expected_materials.append(["DISALLOW", "*"])
for file in snapshot[0]:
# unchanged files
expected_products.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_products.append(["MODIFY", file])
for file in snapshot[2]:
# added files
expected_products.append(["CREATE", file])
expected_products.append(["DISALLOW", "*"])
return {
'expected_materials': expected_materials,
'expected_products': expected_products
}
|
<commit_before>def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
<commit_msg>Add function to generate artifact rules.
Adds a function to before_after_filesystem_snapshot.py called
generate_artifact_rules.<commit_after>def snapshot(before_dict, after_dict):
'''before_after_snapshot is a simple function that returns which files were
unchanged, modified, added or removed from an input dictionary (before_dict)
and an output dictionary (after_dict). Both these dictionaries have file
names as the keys and their hashes as the values.'''
unchanged_files = []
modified_files = []
added_files = []
removed_files = []
for key in before_dict:
if key in after_dict:
if before_dict[key] == after_dict[key]:
# Matching the hashes to check if file was unchanged
unchanged_files.append(key)
else:
modified_files.append(key)
else:
removed_files.append(key)
for key in after_dict:
if key not in before_dict:
# Looking for new files
added_files.append(key)
# Returning the snapshot of the new file system
return (sorted(unchanged_files), sorted(modified_files), sorted(added_files),
sorted(removed_files))
def generate_artifact_rules(snapshot):
'''
Generate Artifact Rules given which files have been added, which have been
removed, which have been modified, and which have remained unchanged.
'''
expected_materials = []
expected_products = []
# TODO: missing rules for MATCH since we don't have the information of the
# material from the previous step
for file in snapshot[0]:
# unchanged files
expected_materials.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_materials.append(["ALLOW", file])
for file in snapshot[3]:
# removed files
expected_materials.append(["DELETE", file])
expected_materials.append(["DISALLOW", "*"])
for file in snapshot[0]:
# unchanged files
expected_products.append(["ALLOW", file])
for file in snapshot[1]:
# modified files
expected_products.append(["MODIFY", file])
for file in snapshot[2]:
# added files
expected_products.append(["CREATE", file])
expected_products.append(["DISALLOW", "*"])
return {
'expected_materials': expected_materials,
'expected_products': expected_products
}
|
2745423ce5a7e9963038a529337a1d71d4465cba
|
core/views.py
|
core/views.py
|
from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename="index.html"):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
|
from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
if not filename:
filename = "index.html"
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
|
Fix empty file names messin stuff up.
|
Fix empty file names messin stuff up.
|
Python
|
mit
|
clarkperkins/readthedocs.org,kenwang76/readthedocs.org,pombredanne/readthedocs.org,LukasBoersma/readthedocs.org,davidfischer/readthedocs.org,raven47git/readthedocs.org,royalwang/readthedocs.org,mhils/readthedocs.org,sils1297/readthedocs.org,gjtorikian/readthedocs.org,nyergler/pythonslides,nyergler/pythonslides,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,VishvajitP/readthedocs.org,kenwang76/readthedocs.org,attakei/readthedocs-oauth,wijerasa/readthedocs.org,davidfischer/readthedocs.org,nikolas/readthedocs.org,davidfischer/readthedocs.org,sid-kap/readthedocs.org,agjohnson/readthedocs.org,espdev/readthedocs.org,GovReady/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,raven47git/readthedocs.org,hach-que/readthedocs.org,emawind84/readthedocs.org,stevepiercy/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,gjtorikian/readthedocs.org,alex/readthedocs.org,attakei/readthedocs-oauth,kdkeyser/readthedocs.org,takluyver/readthedocs.org,mhils/readthedocs.org,chrisdickinson/tweezers,gjtorikian/readthedocs.org,sils1297/readthedocs.org,asampat3090/readthedocs.org,GovReady/readthedocs.org,asampat3090/readthedocs.org,kdkeyser/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintaro/readthedocs.org,wijerasa/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,titiushko/readthedocs.org,VishvajitP/readthedocs.org,SteveViss/readthedocs.org,SteveViss/readthedocs.org,stevepiercy/readthedocs.org,emawind84/readthedocs.org,techtonik/readthedocs.org,istresearch/readthedocs.org,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,sunnyzwh/readthedocs.org,atsuyim/readthedocs.org,singingwolfboy/readthedocs.org,nikolas/readthedocs.org,hach-que/readthedocs.org,johncosta/private-readthedocs.org,dirn/readthedocs.org,royalwang/readthedocs.org,nyergler/pythonslides,d0ugal/readthedocs.org,jerel/readthedocs.org,tddv/readthedocs.org,Carreau/readthedocs.org,takluyver/readthedocs.org,fujita-shintaro/readthed
ocs.org,kenshinthebattosai/readthedocs.org,michaelmcandrew/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,gjtorikian/readthedocs.org,rtfd/readthedocs.org,mrshoki/readthedocs.org,agjohnson/readthedocs.org,kenshinthebattosai/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,laplaceliu/readthedocs.org,sunnyzwh/readthedocs.org,VishvajitP/readthedocs.org,cgourlay/readthedocs.org,titiushko/readthedocs.org,CedarLogic/readthedocs.org,sid-kap/readthedocs.org,Carreau/readthedocs.org,raven47git/readthedocs.org,cgourlay/readthedocs.org,attakei/readthedocs-oauth,sils1297/readthedocs.org,laplaceliu/readthedocs.org,sid-kap/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,LukasBoersma/readthedocs.org,attakei/readthedocs-oauth,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,michaelmcandrew/readthedocs.org,Tazer/readthedocs.org,singingwolfboy/readthedocs.org,dirn/readthedocs.org,Carreau/readthedocs.org,istresearch/readthedocs.org,KamranMackey/readthedocs.org,stevepiercy/readthedocs.org,hach-que/readthedocs.org,jerel/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org,safwanrahman/readthedocs.org,mrshoki/readthedocs.org,techtonik/readthedocs.org,kenshinthebattosai/readthedocs.org,d0ugal/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,johncosta/private-readthedocs.org,atsuyim/readthedocs.org,jerel/readthedocs.org,soulshake/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,alex/readthedocs.org,chrisdickinson/tweezers,kdkeyser/readthedocs.org,alex/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,KamranMackey/readthedocs.org,jerel/readthedocs.org,techtonik/readthedocs.org,wanghaven/
readthedocs.org,d0ugal/readthedocs.org,royalwang/readthedocs.org,wijerasa/readthedocs.org,nikolas/readthedocs.org,mrshoki/readthedocs.org,singingwolfboy/readthedocs.org,wanghaven/readthedocs.org,kenwang76/readthedocs.org,asampat3090/readthedocs.org,rtfd/readthedocs.org,alex/readthedocs.org,cgourlay/readthedocs.org,dirn/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,kdkeyser/readthedocs.org,ojii/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,tddv/readthedocs.org,wanghaven/readthedocs.org,emawind84/readthedocs.org,d0ugal/readthedocs.org,ojii/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,KamranMackey/readthedocs.org,clarkperkins/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,michaelmcandrew/readthedocs.org,espdev/readthedocs.org,istresearch/readthedocs.org,takluyver/readthedocs.org,fujita-shintaro/readthedocs.org,raven47git/readthedocs.org,mrshoki/readthedocs.org,johncosta/private-readthedocs.org,royalwang/readthedocs.org,davidfischer/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,Carreau/readthedocs.org,VishvajitP/readthedocs.org,ojii/readthedocs.org,sils1297/readthedocs.org,clarkperkins/readthedocs.org,tddv/readthedocs.org,agjohnson/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,nyergler/pythonslides,mhils/readthedocs.org,KamranMackey/readthedocs.org,cgourlay/readthedocs.org,Tazer/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org
|
from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename="index.html"):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
Fix empty file names messin stuff up.
|
from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
if not filename:
filename = "index.html"
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
|
<commit_before>from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename="index.html"):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
<commit_msg>Fix empty file names messin stuff up.<commit_after>
|
from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
if not filename:
filename = "index.html"
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
|
from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename="index.html"):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
Fix empty file names messin stuff up.from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
if not filename:
filename = "index.html"
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
|
<commit_before>from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename="index.html"):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
<commit_msg>Fix empty file names messin stuff up.<commit_after>from django.conf import settings
from django.views.static import serve
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_view_exempt
import json
import os
from projects.models import Project
from projects.tasks import update_docs
from projects.utils import get_project_path, find_file
@csrf_view_exempt
def github_build(request):
obj = json.loads(request.POST['payload'])
name = obj['repository']['name']
url = obj['repository']['url']
project = Project.objects.get(github_repo=url)
update_docs.delay(slug=project.slug)
return HttpResponse('Build Started')
def serve_docs(request, username, project_slug, filename):
proj = Project.objects.get(slug=project_slug, user__username=username)
project = proj.slug
path = get_project_path(proj)
if not filename:
filename = "index.html"
filename = filename.rstrip('/')
doc_base = os.path.join(path, project)
for possible_path in ['docs', 'doc']:
for pos_build in ['build', '_build', '.build']:
if os.path.exists(os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))):
final_path = os.path.join(doc_base, '%s/%s/html' % (possible_path, pos_build))
return serve(request, filename, final_path)
|
2327eb0c4db7d6b771777e8d73ec99a8e324391a
|
printzone.py
|
printzone.py
|
#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
for n in names:
print z[n].to_text(n)
except dns.exception.FormError:
print "The transfer encountered a problem."
|
#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
zone_xfr_array = []
for n in names:
current_record = z[n].to_text(n)
for split_record in current_record.split("\n"):
zone_xfr_array.append([split_record])
except dns.exception.FormError:
print "The transfer encountered a problem."
for current_record in zone_xfr_array:
print current_record
|
Split the XFR output into an array of values.
|
Split the XFR output into an array of values.
Had to split on \n. Need to go through the code more and figure out why it doesn't actually present itself
upon printing
|
Python
|
bsd-3-clause
|
jforman/python-ddns
|
#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
for n in names:
print z[n].to_text(n)
except dns.exception.FormError:
print "The transfer encountered a problem."
Split the XFR output into an array of values.
Had to split on \n. Need to go through the code more and figure out why it doesn't actually present itself
upon printing
|
#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
zone_xfr_array = []
for n in names:
current_record = z[n].to_text(n)
for split_record in current_record.split("\n"):
zone_xfr_array.append([split_record])
except dns.exception.FormError:
print "The transfer encountered a problem."
for current_record in zone_xfr_array:
print current_record
|
<commit_before>#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
for n in names:
print z[n].to_text(n)
except dns.exception.FormError:
print "The transfer encountered a problem."
<commit_msg>Split the XFR output into an array of values.
Had to split on \n. Need to go through the code more and figure out why it doesn't actually present itself
upon printing<commit_after>
|
#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
zone_xfr_array = []
for n in names:
current_record = z[n].to_text(n)
for split_record in current_record.split("\n"):
zone_xfr_array.append([split_record])
except dns.exception.FormError:
print "The transfer encountered a problem."
for current_record in zone_xfr_array:
print current_record
|
#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
for n in names:
print z[n].to_text(n)
except dns.exception.FormError:
print "The transfer encountered a problem."
Split the XFR output into an array of values.
Had to split on \n. Need to go through the code more and figure out why it doesn't actually present itself
upon printing#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
zone_xfr_array = []
for n in names:
current_record = z[n].to_text(n)
for split_record in current_record.split("\n"):
zone_xfr_array.append([split_record])
except dns.exception.FormError:
print "The transfer encountered a problem."
for current_record in zone_xfr_array:
print current_record
|
<commit_before>#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
for n in names:
print z[n].to_text(n)
except dns.exception.FormError:
print "The transfer encountered a problem."
<commit_msg>Split the XFR output into an array of values.
Had to split on \n. Need to go through the code more and figure out why it doesn't actually present itself
upon printing<commit_after>#!/usr/bin/env python
import dns.query
import dns.zone
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--server", dest="dns_server",
help="DNS server to query.",
type="string")
parser.add_option("--zone", dest="dns_zone",
help="Zone to print.",
type="string")
(options, args) = parser.parse_args()
print "Server: %s Zone: %s" % (options.dns_server, options.dns_zone)
try:
z = dns.zone.from_xfr(dns.query.xfr(options.dns_server, options.dns_zone))
names = z.nodes.keys()
names.sort()
zone_xfr_array = []
for n in names:
current_record = z[n].to_text(n)
for split_record in current_record.split("\n"):
zone_xfr_array.append([split_record])
except dns.exception.FormError:
print "The transfer encountered a problem."
for current_record in zone_xfr_array:
print current_record
|
c3571bf950862a17a0d2938167a3cb9912fab6d9
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_list_table': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
|
import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_plugins': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
|
Use the proper entry point name.
|
Use the proper entry point name.
|
Python
|
mit
|
coddingtonbear/jirafs-list-table
|
import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_list_table': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
Use the proper entry point name.
|
import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_plugins': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
|
<commit_before>import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_list_table': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
<commit_msg>Use the proper entry point name.<commit_after>
|
import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_plugins': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
|
import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_list_table': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
Use the proper entry point name.import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_plugins': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
|
<commit_before>import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_list_table': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
<commit_msg>Use the proper entry point name.<commit_after>import os
from setuptools import setup, find_packages
import uuid
from jirafs_list_table import __version__ as version_string
requirements_path = os.path.join(
os.path.dirname(__file__),
'requirements.txt',
)
try:
from pip.req import parse_requirements
requirements = [
str(req.req) for req in parse_requirements(
requirements_path,
session=uuid.uuid1()
)
]
except (ImportError, AttributeError, ValueError, TypeError):
requirements = []
with open(requirements_path, 'r') as in_:
requirements = [
req for req in in_.readlines()
if not req.startswith('-')
and not req.startswith('#')
]
setup(
name='jirafs_list_table',
version=version_string,
url='https://github.com/coddingtonbear/jirafs-list-table',
description="Make simple tables in JIRA more easily by using a simple list-based syntax.",
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
],
install_requires=requirements,
include_package_data=True,
packages=find_packages(),
entry_points={
'jirafs_plugins': [
'list_table = jirafs_list_table.plugin:Plugin',
]
},
)
|
3577007a2b48ca410a0a34a10f64adcdb3537912
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
|
Update the classification to production/stable.
|
Update the classification to production/stable.
|
Python
|
bsd-3-clause
|
benspaulding/django-gcframe
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
Update the classification to production/stable.
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
<commit_msg>Update the classification to production/stable.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
|
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
Update the classification to production/stable.# -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
<commit_msg>Update the classification to production/stable.<commit_after># -*- coding: utf-8 -*-
import os
from distutils.core import setup
here = os.path.dirname(__file__)
def get_long_desc():
return open(os.path.join(here, 'README.rst')).read()
# Function borrowed from carljm.
def get_version():
fh = open(os.path.join(here, "gcframe", "__init__.py"))
try:
for line in fh.readlines():
if line.startswith("__version__ ="):
return line.split("=")[1].strip().strip("'")
finally:
fh.close()
setup(
name='django-gcframe',
version=get_version(),
description='Django middleware and decorators for working with Google Chrome Frame.',
url='https://github.com/benspaulding/django-gcframe/',
author='Ben Spaulding',
author_email='ben@benspaulding.us',
license='BSD',
download_url='https://github.com/benspaulding/django-gcframe/tarball/v%s' % get_version(),
long_description = get_long_desc(),
packages = [
'gcframe',
'gcframe.tests',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
)
|
3c94fc8a784420740caa8831363b6ebb8b1d6095
|
django_archive/archivers/__init__.py
|
django_archive/archivers/__init__.py
|
from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
|
from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
FORMATS = (
(TARBALL, "Tarball (.tar)"),
(TARBALL_GZ, "gzip-compressed Tarball (.tar.gz)"),
(TARBALL_BZ2, "bzip2-compressed Tarball (.tar.bz2)"),
(TARBALL_XZ, "xz-compressed Tarball (.tar.xz)"),
(ZIP, "ZIP archive (.zip)"),
)
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
|
Add tuple containing all supported archive formats and their human-readable description.
|
Add tuple containing all supported archive formats and their human-readable description.
|
Python
|
mit
|
nathan-osman/django-archive,nathan-osman/django-archive
|
from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
Add tuple containing all supported archive formats and their human-readable description.
|
from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
FORMATS = (
(TARBALL, "Tarball (.tar)"),
(TARBALL_GZ, "gzip-compressed Tarball (.tar.gz)"),
(TARBALL_BZ2, "bzip2-compressed Tarball (.tar.bz2)"),
(TARBALL_XZ, "xz-compressed Tarball (.tar.xz)"),
(ZIP, "ZIP archive (.zip)"),
)
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
|
<commit_before>from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
<commit_msg>Add tuple containing all supported archive formats and their human-readable description.<commit_after>
|
from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
FORMATS = (
(TARBALL, "Tarball (.tar)"),
(TARBALL_GZ, "gzip-compressed Tarball (.tar.gz)"),
(TARBALL_BZ2, "bzip2-compressed Tarball (.tar.bz2)"),
(TARBALL_XZ, "xz-compressed Tarball (.tar.xz)"),
(ZIP, "ZIP archive (.zip)"),
)
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
|
from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
Add tuple containing all supported archive formats and their human-readable description.from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
FORMATS = (
(TARBALL, "Tarball (.tar)"),
(TARBALL_GZ, "gzip-compressed Tarball (.tar.gz)"),
(TARBALL_BZ2, "bzip2-compressed Tarball (.tar.bz2)"),
(TARBALL_XZ, "xz-compressed Tarball (.tar.xz)"),
(ZIP, "ZIP archive (.zip)"),
)
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
|
<commit_before>from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
<commit_msg>Add tuple containing all supported archive formats and their human-readable description.<commit_after>from .tarball import TarballArchiver
from .zipfile import ZipArchiver
TARBALL = TarballArchiver.UNCOMPRESSED
TARBALL_GZ = TarballArchiver.GZ
TARBALL_BZ2 = TarballArchiver.BZ2
TARBALL_XZ = TarballArchiver.XZ
ZIP = 'zip'
FORMATS = (
(TARBALL, "Tarball (.tar)"),
(TARBALL_GZ, "gzip-compressed Tarball (.tar.gz)"),
(TARBALL_BZ2, "bzip2-compressed Tarball (.tar.bz2)"),
(TARBALL_XZ, "xz-compressed Tarball (.tar.xz)"),
(ZIP, "ZIP archive (.zip)"),
)
def get_archiver(fmt):
"""
Return the class corresponding with the provided archival format
"""
if fmt in (TARBALL, TARBALL_GZ, TARBALL_BZ2, TARBALL_XZ):
return TarballArchiver
if fmt == ZIP:
return ZipArchiver
raise KeyError("Invalid format '{}' specified".format(fmt))
|
ee494fd205c58029960d4a5702f59418c8110eb3
|
django_iceberg/context_processors.py
|
django_iceberg/context_processors.py
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
Add username to context in iceberg_settings context processor
|
Add username to context in iceberg_settings context processor
|
Python
|
mit
|
izberg-marketplace/django-izberg,izberg-marketplace/django-izberg,Iceberg-Marketplace/django-iceberg,Iceberg-Marketplace/django-iceberg
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
Add username to context in iceberg_settings context processor
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
<commit_before># -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
<commit_msg>Add username to context in iceberg_settings context processor<commit_after>
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
Add username to context in iceberg_settings context processor# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
<commit_before># -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
<commit_msg>Add username to context in iceberg_settings context processor<commit_after># -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from django_iceberg.auth_utils import init_iceberg, get_conf_class
def iceberg_settings(request):
"""
Defines some template variables in context
"""
conf = get_conf_class(request)
if not conf:
ICEBERG_API_URL_FULL = "https://api.iceberg.technology"
ICEBERG_CORS = "https://api.iceberg.technology/cors/"
iceberg_env = 'prod'
else:
iceberg_env = getattr(conf, 'ICEBERG_ENV', 'prod')
ICEBERG_API_URL_FULL = conf.ICEBERG_API_URL_FULL
ICEBERG_CORS = conf.ICEBERG_CORS
res = {
"ICEBERG_ENV": iceberg_env,
"ICEBERG_API_URL": ICEBERG_API_URL_FULL,
"ICEBERG_CORS": ICEBERG_CORS,
}
if request.user.is_authenticated():
res['access_token'] = init_iceberg(request).access_token
res['username'] = init_iceberg(request).username
else:
res['ICEBERG_ENV'] = None
res['access_token'] = "anonymous"
return res
|
ac5b383520286c1c9d1aadc9a46fb900e4227b55
|
setup.py
|
setup.py
|
from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
|
from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
|
Set pypi development status to Pre-Alpha
|
Set pypi development status to Pre-Alpha
|
Python
|
mit
|
pior/caravan
|
from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
Set pypi development status to Pre-Alpha
|
from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
|
<commit_before>from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
<commit_msg>Set pypi development status to Pre-Alpha<commit_after>
|
from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
|
from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
Set pypi development status to Pre-Alphafrom setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
|
<commit_before>from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
<commit_msg>Set pypi development status to Pre-Alpha<commit_after>from setuptools import find_packages
from setuptools import setup
setup(
name='caravan',
version='0.0.3.dev0',
description='Light python framework for AWS SWF',
long_description=open('README.rst').read(),
keywords='AWS SWF workflow distributed background task',
author='Pior Bastida',
author_email='pior@pbastida.net',
url='https://github.com/pior/caravan',
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=['boto3'],
extras_require={
'dev': ['zest.releaser[recommended]'],
},
entry_points={
"console_scripts": [
"caravan-decider = caravan.commands.decider:Command.main",
"caravan-start = caravan.commands.start:Command.main",
"caravan-signal = caravan.commands.signal:Command.main",
]
},
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet",
"Programming Language :: Python :: 2.7",
# "Programming Language :: Python :: 3.4",
# "Programming Language :: Python :: 3.5"
],
)
|
4e20ea7d100d1611b018d5dede4a17915959d6f1
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
install_requires=[
'requests==2.0.0',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests==2.0.0',
],
)
|
Include package data in release
|
Include package data in release
|
Python
|
mit
|
rkhleics/police-api-client-python
|
#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
install_requires=[
'requests==2.0.0',
],
)
Include package data in release
|
#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests==2.0.0',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
install_requires=[
'requests==2.0.0',
],
)
<commit_msg>Include package data in release<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests==2.0.0',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
install_requires=[
'requests==2.0.0',
],
)
Include package data in release#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests==2.0.0',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
install_requires=[
'requests==2.0.0',
],
)
<commit_msg>Include package data in release<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
execfile('police_api/version.py')
setup(
name='police-api-client',
version=__version__, # NOQA
description='Python client library for the Police API',
long_description=open('README.rst').read(),
author='Rock Kitchen Harris',
license='MIT',
url='https://github.com/rkhleics/police-api-client-python',
download_url='https://github.com/rkhleics/police-api-client-python/downloads',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests==2.0.0',
],
)
|
47dedd31b9ee0f768ca3f9f781133458ddc99f4f
|
setup.py
|
setup.py
|
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
|
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
|
Add sphinx to dev requirements
|
Add sphinx to dev requirements
|
Python
|
mit
|
Turbasen/turbasen.py
|
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
Add sphinx to dev requirements
|
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
|
<commit_before>from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
<commit_msg>Add sphinx to dev requirements<commit_after>
|
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
|
from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
Add sphinx to dev requirementsfrom setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
|
<commit_before>from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['ipython', 'flake8'],
}
)
<commit_msg>Add sphinx to dev requirements<commit_after>from setuptools import setup
name = 'turbasen'
VERSION = '2.5.0'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description='Documentation: https://turbasenpy.readthedocs.io/',
author='Ali Kaafarani',
author_email='ali.kaafarani@dnt.no',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['requests>=2.10.0,<3'],
extras_require={
'dev': ['sphinx', 'ipython', 'flake8'],
}
)
|
8e934707349079353b00d8a8be8c99431b357595
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']),
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']) + ['twisted.plugins'],
package_data={'twisted.plugins': ['twisted/plugins/*.py']},
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
# NOTE: See https://github.com/pypa/pip/issues/355 regarding Twisted
# plugins and "pip uninstall"
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
Include Twisted plugin in install.
|
Include Twisted plugin in install.
|
Python
|
bsd-3-clause
|
TouK/vumi,TouK/vumi,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']),
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
Include Twisted plugin in install.
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']) + ['twisted.plugins'],
package_data={'twisted.plugins': ['twisted/plugins/*.py']},
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
# NOTE: See https://github.com/pypa/pip/issues/355 regarding Twisted
# plugins and "pip uninstall"
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
<commit_before>from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']),
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
<commit_msg>Include Twisted plugin in install.<commit_after>
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']) + ['twisted.plugins'],
package_data={'twisted.plugins': ['twisted/plugins/*.py']},
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
# NOTE: See https://github.com/pypa/pip/issues/355 regarding Twisted
# plugins and "pip uninstall"
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']),
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
Include Twisted plugin in install.from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']) + ['twisted.plugins'],
package_data={'twisted.plugins': ['twisted/plugins/*.py']},
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
# NOTE: See https://github.com/pypa/pip/issues/355 regarding Twisted
# plugins and "pip uninstall"
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
<commit_before>from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']),
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
<commit_msg>Include Twisted plugin in install.<commit_after>from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').read().split('\n'))
setup(
name="vumi",
version="0.4.0a",
url='http://github.com/praekelt/vumi',
license='BSD',
description="Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description=open('README.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(exclude=['environments']) + ['twisted.plugins'],
package_data={'twisted.plugins': ['twisted/plugins/*.py']},
include_package_data=True,
install_requires=['setuptools'] + listify('requirements.pip'),
# NOTE: See https://github.com/pypa/pip/issues/355 regarding Twisted
# plugins and "pip uninstall"
dependency_links=[
'https://github.com/dmaclay/python-smpp/zipball/develop#egg=python-smpp',
'https://github.com/dustin/twitty-twister/zipball/master#egg=twitty-twister',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
|
fb1d6d40446a6f51d146c2d426b3a7e5509441f6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
Change trove classifier to show project is now stable
|
Change trove classifier to show project is now stable
|
Python
|
mit
|
cdown/srt
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
Change trove classifier to show project is now stable
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
<commit_msg>Change trove classifier to show project is now stable<commit_after>
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
Change trove classifier to show project is now stable#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
<commit_msg>Change trove classifier to show project is now stable<commit_after>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name='srt',
version='1.0.0',
description='A tiny library for parsing, modifying, and composing SRT '
'files.',
long_description=README,
author='Chris Down',
author_email='chris@chrisdown.name',
url='https://github.com/cdown/srt',
py_modules=['srt'],
license='ISC',
zip_safe=False,
keywords='srt',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Text Processing',
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
66a44d74fa4af27c9d0de86865fc32352c684183
|
setup.py
|
setup.py
|
import os
import sys
from os.path import join as pjoin
from setuptools import setup
from setuptools import Command
from subprocess import call
def read_version_string():
version = None
sys.path.insert(0, pjoin(os.getcwd()))
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
import os
import sys
from setuptools import setup
def read_version_string():
version = None
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, current_dir)
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Add a correct dir to sys.path, remove unused imports.
|
Add a correct dir to sys.path, remove unused imports.
|
Python
|
apache-2.0
|
Kami/python-extra-log-formatters
|
import os
import sys
from os.path import join as pjoin
from setuptools import setup
from setuptools import Command
from subprocess import call
def read_version_string():
version = None
sys.path.insert(0, pjoin(os.getcwd()))
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Add a correct dir to sys.path, remove unused imports.
|
import os
import sys
from setuptools import setup
def read_version_string():
version = None
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, current_dir)
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>import os
import sys
from os.path import join as pjoin
from setuptools import setup
from setuptools import Command
from subprocess import call
def read_version_string():
version = None
sys.path.insert(0, pjoin(os.getcwd()))
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Add a correct dir to sys.path, remove unused imports.<commit_after>
|
import os
import sys
from setuptools import setup
def read_version_string():
version = None
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, current_dir)
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
import os
import sys
from os.path import join as pjoin
from setuptools import setup
from setuptools import Command
from subprocess import call
def read_version_string():
version = None
sys.path.insert(0, pjoin(os.getcwd()))
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Add a correct dir to sys.path, remove unused imports.import os
import sys
from setuptools import setup
def read_version_string():
version = None
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, current_dir)
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>import os
import sys
from os.path import join as pjoin
from setuptools import setup
from setuptools import Command
from subprocess import call
def read_version_string():
version = None
sys.path.insert(0, pjoin(os.getcwd()))
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Add a correct dir to sys.path, remove unused imports.<commit_after>import os
import sys
from setuptools import setup
def read_version_string():
version = None
current_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, current_dir)
from log_formatters import __version__
version = __version__
sys.path.pop(0)
return version
setup(
name='log-formatters',
version=read_version_string(),
#long_description=open('README.rst').read() + '\n\n' +
#open('CHANGES.rst').read(),
packages=[
'log_formatters'
],
package_dir={
'log_formatters': 'log_formatters'
},
url='https://github.com/Kami/python-log-formatters/',
license='Apache License (2.0)',
author='Tomaz Muraus',
author_email='tomaz+pypi@tomaz.me',
description='A collection of useful Python log formatter classes.',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
cb84aa95759234ff2d7f8aa6b67e28eab382f9cc
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
version = "0.10"
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peer.tests",
"pycoinnet.peergroup",
"pycoinnet.peergroup.tests",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
|
#!/usr/bin/env python
from setuptools import setup
version = "0.19"
REQUIREMENTS = [i.strip() for i in open("requirements.txt").readlines()]
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peergroup",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
install_requires=REQUIREMENTS,
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
|
Include the requirements.txt contents in install_requires.
|
Include the requirements.txt contents in install_requires.
|
Python
|
mit
|
antiface/pycoinnet,antiface/pycoinnet,richardkiss/pycoinnet,richardkiss/pycoinnet
|
#!/usr/bin/env python
from setuptools import setup
version = "0.10"
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peer.tests",
"pycoinnet.peergroup",
"pycoinnet.peergroup.tests",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
Include the requirements.txt contents in install_requires.
|
#!/usr/bin/env python
from setuptools import setup
version = "0.19"
REQUIREMENTS = [i.strip() for i in open("requirements.txt").readlines()]
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peergroup",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
install_requires=REQUIREMENTS,
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
version = "0.10"
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peer.tests",
"pycoinnet.peergroup",
"pycoinnet.peergroup.tests",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
<commit_msg>Include the requirements.txt contents in install_requires.<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
version = "0.19"
REQUIREMENTS = [i.strip() for i in open("requirements.txt").readlines()]
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peergroup",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
install_requires=REQUIREMENTS,
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
|
#!/usr/bin/env python
from setuptools import setup
version = "0.10"
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peer.tests",
"pycoinnet.peergroup",
"pycoinnet.peergroup.tests",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
Include the requirements.txt contents in install_requires.#!/usr/bin/env python
from setuptools import setup
version = "0.19"
REQUIREMENTS = [i.strip() for i in open("requirements.txt").readlines()]
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peergroup",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
install_requires=REQUIREMENTS,
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
version = "0.10"
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peer.tests",
"pycoinnet.peergroup",
"pycoinnet.peergroup.tests",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
<commit_msg>Include the requirements.txt contents in install_requires.<commit_after>#!/usr/bin/env python
from setuptools import setup
version = "0.19"
REQUIREMENTS = [i.strip() for i in open("requirements.txt").readlines()]
setup(
name="pycoinnet",
version=version,
packages=[
"pycoinnet",
"pycoinnet.examples",
"pycoinnet.helpers",
"pycoinnet.peer",
"pycoinnet.peergroup",
"pycoinnet.util",
],
entry_points = { 'console_scripts':
[
'blockwatcher = pycoinnet.examples.blockwatcher:main',
]
},
install_requires=REQUIREMENTS,
author="Richard Kiss",
author_email="him@richardkiss.com",
url="https://github.com/richardkiss/pycoinnet",
license="http://opensource.org/licenses/MIT",
description="Network utilities for communicating on the bitcoin network."
)
|
0f464b995968acbc4685b90472ef2af260b67381
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.1'])
|
from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.5'])
|
Upgrade requirements to be Django 1.2.5
|
Upgrade requirements to be Django 1.2.5
|
Python
|
agpl-3.0
|
pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity
|
from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.1'])
Upgrade requirements to be Django 1.2.5
|
from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.5'])
|
<commit_before>from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.1'])
<commit_msg>Upgrade requirements to be Django 1.2.5<commit_after>
|
from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.5'])
|
from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.1'])
Upgrade requirements to be Django 1.2.5from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.5'])
|
<commit_before>from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.1'])
<commit_msg>Upgrade requirements to be Django 1.2.5<commit_after>from setuptools import setup, find_packages
version = '1.2.0'
setup(name="Miro Community",
version=version,
packages=find_packages(),
author='Participatory Culture Foundation',
license='AGPLv3',
install_requires=['django==1.2.5'])
|
70cf8c4c49a12caf68ea3f5d1d6b138e21673981
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=['wagtail>=0.6'],
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
|
#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=[],
extras_require={
'full': ['wagtail>=0.6'],
},
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
|
Install wagtail only if needed.
|
Install wagtail only if needed.
In my case, I'm using not yet released to pypi wagtail, from master branch, and this setup script tries to install old wagtail and old django.
|
Python
|
bsd-2-clause
|
salvadormrf/wagtailsettings,salvadormrf/wagtailsettings
|
#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=['wagtail>=0.6'],
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
Install wagtail only if needed.
In my case, I'm using not yet released to pypi wagtail, from master branch, and this setup script tries to install old wagtail and old django.
|
#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=[],
extras_require={
'full': ['wagtail>=0.6'],
},
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
|
<commit_before>#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=['wagtail>=0.6'],
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
<commit_msg>Install wagtail only if needed.
In my case, I'm using not yet released to pypi wagtail, from master branch, and this setup script tries to install old wagtail and old django.<commit_after>
|
#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=[],
extras_require={
'full': ['wagtail>=0.6'],
},
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
|
#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=['wagtail>=0.6'],
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
Install wagtail only if needed.
In my case, I'm using not yet released to pypi wagtail, from master branch, and this setup script tries to install old wagtail and old django.#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=[],
extras_require={
'full': ['wagtail>=0.6'],
},
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
|
<commit_before>#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=['wagtail>=0.6'],
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
<commit_msg>Install wagtail only if needed.
In my case, I'm using not yet released to pypi wagtail, from master branch, and this setup script tries to install old wagtail and old django.<commit_after>#!/usr/bin/env python
"""
Install wagtailsettings using setuptools
"""
from wagtailsettings import __version__
with open('README.rst', 'r') as f:
readme = f.read()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='wagtailsettings',
version=__version__,
description='Admin-editable settings for Wagtail projects',
long_description=readme,
author='Tim Heap',
author_email='tim@takeflight.com.au',
url='https://bitbucket.org/takeflight/wagtailsettings',
install_requires=[],
extras_require={
'full': ['wagtail>=0.6'],
},
zip_safe=False,
license='BSD License',
packages=find_packages(),
include_package_data=True,
package_data={ },
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
],
)
|
8f8c20859828e887699e3acda4671d113cb4b011
|
setup.py
|
setup.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
extras_require={
'dataframe': ['pandas'],
},
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
|
Add pandas as optional dependency.
|
Add pandas as optional dependency.
|
Python
|
mit
|
arkottke/pysra
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
Add pandas as optional dependency.
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
extras_require={
'dataframe': ['pandas'],
},
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
|
<commit_before>#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
<commit_msg>Add pandas as optional dependency.<commit_after>
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
extras_require={
'dataframe': ['pandas'],
},
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
Add pandas as optional dependency.#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
extras_require={
'dataframe': ['pandas'],
},
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
|
<commit_before>#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
<commit_msg>Add pandas as optional dependency.<commit_after>#!/usr/bin/python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as fp:
readme = fp.read()
with open('HISTORY.rst') as fp:
history = fp.read()
setup(
name='pySRA',
version='0.4.10',
description='Site Response Analysis with Python',
long_description=readme + '\n\n' + history,
author='Albert Kottke',
author_email='albert.kottke@gmail.com',
url='http://github.com/arkottke/pysra',
packages=find_packages(),
install_requires=[
'matplotlib',
'numpy',
'pyrvt',
'scipy',
'setuptools',
'typing',
],
extras_require={
'dataframe': ['pandas'],
},
keywords='site response',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Intended Audience :: Science/Research',
],
test_suite='tests', )
|
2a6dbe27150c6d82daee48f054936088467f431f
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple distributed job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"setproctitle",
"redis",
"python-dateutil",
"pytz"
],
dependency_links=[
"http://github.com/surfly/gevent/tarball/1.0rc3#egg=gevent-1.0dev"
],
tests_require=[
"mock",
"nose"
]
)
|
from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple asynchronous job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"python-dateutil",
"pytz"
],
extras_require={
"server": [
"setproctitle",
"redis"
]
},
tests_require=[
"mock",
"nose"
]
)
|
Split out the dependencies of client and server.
|
Split out the dependencies of client and server.
|
Python
|
mit
|
wglass/rotterdam
|
from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple distributed job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"setproctitle",
"redis",
"python-dateutil",
"pytz"
],
dependency_links=[
"http://github.com/surfly/gevent/tarball/1.0rc3#egg=gevent-1.0dev"
],
tests_require=[
"mock",
"nose"
]
)
Split out the dependencies of client and server.
|
from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple asynchronous job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"python-dateutil",
"pytz"
],
extras_require={
"server": [
"setproctitle",
"redis"
]
},
tests_require=[
"mock",
"nose"
]
)
|
<commit_before>from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple distributed job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"setproctitle",
"redis",
"python-dateutil",
"pytz"
],
dependency_links=[
"http://github.com/surfly/gevent/tarball/1.0rc3#egg=gevent-1.0dev"
],
tests_require=[
"mock",
"nose"
]
)
<commit_msg>Split out the dependencies of client and server.<commit_after>
|
from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple asynchronous job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"python-dateutil",
"pytz"
],
extras_require={
"server": [
"setproctitle",
"redis"
]
},
tests_require=[
"mock",
"nose"
]
)
|
from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple distributed job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"setproctitle",
"redis",
"python-dateutil",
"pytz"
],
dependency_links=[
"http://github.com/surfly/gevent/tarball/1.0rc3#egg=gevent-1.0dev"
],
tests_require=[
"mock",
"nose"
]
)
Split out the dependencies of client and server.from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple asynchronous job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"python-dateutil",
"pytz"
],
extras_require={
"server": [
"setproctitle",
"redis"
]
},
tests_require=[
"mock",
"nose"
]
)
|
<commit_before>from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple distributed job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"setproctitle",
"redis",
"python-dateutil",
"pytz"
],
dependency_links=[
"http://github.com/surfly/gevent/tarball/1.0rc3#egg=gevent-1.0dev"
],
tests_require=[
"mock",
"nose"
]
)
<commit_msg>Split out the dependencies of client and server.<commit_after>from setuptools import setup
setup(
name="rotterdam",
version="0.3.2",
description=(
"Simple asynchronous job queue via redis."
),
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/rotterdam",
packages=["rotterdam"],
include_package_data=True,
package_data={
'rotterdam': ['lua/*.lua']
},
scripts=[
"bin/rotterdam",
"bin/rotterdamctl"
],
install_requires=[
"python-dateutil",
"pytz"
],
extras_require={
"server": [
"setproctitle",
"redis"
]
},
tests_require=[
"mock",
"nose"
]
)
|
cd27849acae57a0382f66116771491576177a39e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = "See https://furtive.readthedocs.org"
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
|
#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = """
Github: https://github.com/dbryant4/furtive
"""
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
download_url='https://github.com/dbryant4/furtive',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
|
Add links to github project page
|
Add links to github project page
|
Python
|
mit
|
dbryant4/furtive
|
#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = "See https://furtive.readthedocs.org"
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
Add links to github project page
|
#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = """
Github: https://github.com/dbryant4/furtive
"""
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
download_url='https://github.com/dbryant4/furtive',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = "See https://furtive.readthedocs.org"
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
<commit_msg>Add links to github project page<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = """
Github: https://github.com/dbryant4/furtive
"""
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
download_url='https://github.com/dbryant4/furtive',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
|
#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = "See https://furtive.readthedocs.org"
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
Add links to github project page#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = """
Github: https://github.com/dbryant4/furtive
"""
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
download_url='https://github.com/dbryant4/furtive',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = "See https://furtive.readthedocs.org"
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
<commit_msg>Add links to github project page<commit_after>#!/usr/bin/env python
from distutils.core import setup
__VERSION__ = '0.2.4'
long_description = """
Github: https://github.com/dbryant4/furtive
"""
setup(name='Furtive',
version=__VERSION__,
description='File Integrity Verification System',
author='Derrick Bryant',
author_email='dbryant4@gmail.com',
long_description=long_description,
license='MIT',
packages=['furtive'],
scripts=['scripts/furtive'],
url='https://furtive.readthedocs.org',
download_url='https://github.com/dbryant4/furtive',
install_requires=[
'PyYAML==3.11',
'argparse==1.4.0'
]
)
|
e725a8ec2a0c998e9eeca100dfb6eb49035c343c
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
url='https://github.com/flowroute/two_factor_auth_python_demo',
author_email='developer@flowroute.com',
)
|
from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
license='MIT',
url='https://github.com/flowroute/sms-verification',
author_email='developer@flowroute.com',
)
|
Add license information, and update project url.
|
Add license information, and update project url.
|
Python
|
mit
|
flowroute/sms-verification,flowroute/sms-verification,flowroute/sms-verification
|
from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
url='https://github.com/flowroute/two_factor_auth_python_demo',
author_email='developer@flowroute.com',
)
Add license information, and update project url.
|
from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
license='MIT',
url='https://github.com/flowroute/sms-verification',
author_email='developer@flowroute.com',
)
|
<commit_before>from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
url='https://github.com/flowroute/two_factor_auth_python_demo',
author_email='developer@flowroute.com',
)
<commit_msg>Add license information, and update project url.<commit_after>
|
from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
license='MIT',
url='https://github.com/flowroute/sms-verification',
author_email='developer@flowroute.com',
)
|
from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
url='https://github.com/flowroute/two_factor_auth_python_demo',
author_email='developer@flowroute.com',
)
Add license information, and update project url.from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
license='MIT',
url='https://github.com/flowroute/sms-verification',
author_email='developer@flowroute.com',
)
|
<commit_before>from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
url='https://github.com/flowroute/two_factor_auth_python_demo',
author_email='developer@flowroute.com',
)
<commit_msg>Add license information, and update project url.<commit_after>from setuptools import setup
setup(name='sms_auth_service',
version='0.0.1',
description='An SMS backed authorization micro-service',
packages=['sms_auth_service'],
author='Flowroute Developers',
license='MIT',
url='https://github.com/flowroute/sms-verification',
author_email='developer@flowroute.com',
)
|
ed6d7fdbf24780baa1afa60961fc4bd22354ae8f
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==1.1.0',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
|
import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==2.2.1',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
|
Upgrade requests library to version 2.1.1
|
Upgrade requests library to version 2.1.1
Upgrade the `requests` library to match the version provided by Ubuntu
Trusty, version 2.1.1.
This is to prevent a conflict on Ubuntu Trusty between system Python
libraries and Pip libraries.
Specifically, Pip relies on the `IncompleteRead` module that is exported
by `requests.compat`. Version 2.4.3 of the `requests` library removed
that exported module[1].
When `ghtools` is installed, Pip would upgrade `requests` to version
2.4.3 (the latest available), thereby causing Pip to break because the
`requests` module installed by Pip (in
`/usr/lib/python2.7/dist-packages/`) takes precendence over the system
version of that module.
This was causing the following Puppet error on our ci-slave-4 box in Vagrant
using Ubuntu Trusty:
==> ci-slave-4: Error: Could not prefetch package provider 'pip': [nil, nil, [(provider=pip)], nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil]
When I tried to run `pip` on the Vagrant box, I got the following error:
vagrant@ci-slave-4:~$ pip
Traceback (most recent call last):
File "/usr/bin/pip", line 9, in <module>
load_entry_point('pip==1.5.4', 'console_scripts', 'pip')()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 351, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2363, in load_entry_point
return ep.load()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2088, in load
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
File "/usr/lib/python2.7/dist-packages/pip/__init__.py", line 11, in <module>
from pip.vcs import git, mercurial, subversion, bazaar # noqa
File "/usr/lib/python2.7/dist-packages/pip/vcs/mercurial.py", line 9, in <module>
from pip.download import path_to_url
File "/usr/lib/python2.7/dist-packages/pip/download.py", line 25, in <module>
from requests.compat import IncompleteRead
ImportError: cannot import name IncompleteRead
vagrant@ci-slave-4:~$
By installing requiring the exact same version of `requests` as the one
provided by the system under Ubuntu Trusty, Pip no longer needs to
install the `requests` module and `ghtools` will use the system module.
This is also tested under Precise and does not break Pip.
I verified the version of `requests` installed on Ubuntu Trusty prior to
installing `ghtools`:
vagrant@jumpbox-2:~/ghtools$ python
Python 2.7.6 (default, Mar 22 2014, 22:59:56)
[GCC 4.8.2] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import requests
>>> print requests.__version__
2.2.1
>>>
When installing `ghtools`, Pip detects the existing system version and
will not install its own:
vagrant@jumpbox-2:~/ghtools$ sudo pip install .
Unpacking /home/vagrant/ghtools
Running setup.py (path:/tmp/user/0/pip-0GLR1h-build/setup.py) egg_info for package from file:///home/vagrant/ghtools
Requirement already satisfied (use --upgrade to upgrade): requests==2.2.1 in /usr/lib/python2.7/dist-packages (from ghtools==0.21.0)
[...]
For more context, please see this bug report (though the bug is not in
python-pip):
https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1306991
[1]: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=766419
|
Python
|
mit
|
alphagov/ghtools
|
import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==1.1.0',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
Upgrade requests library to version 2.1.1
Upgrade the `requests` library to match the version provided by Ubuntu
Trusty, version 2.1.1.
This is to prevent a conflict on Ubuntu Trusty between system Python
libraries and Pip libraries.
Specifically, Pip relies on the `IncompleteRead` module that is exported
by `requests.compat`. Version 2.4.3 of the `requests` library removed
that exported module[1].
When `ghtools` is installed, Pip would upgrade `requests` to version
2.4.3 (the latest available), thereby causing Pip to break because the
`requests` module installed by Pip (in
`/usr/lib/python2.7/dist-packages/`) takes precendence over the system
version of that module.
This was causing the following Puppet error on our ci-slave-4 box in Vagrant
using Ubuntu Trusty:
==> ci-slave-4: Error: Could not prefetch package provider 'pip': [nil, nil, [(provider=pip)], nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil]
When I tried to run `pip` on the Vagrant box, I got the following error:
vagrant@ci-slave-4:~$ pip
Traceback (most recent call last):
File "/usr/bin/pip", line 9, in <module>
load_entry_point('pip==1.5.4', 'console_scripts', 'pip')()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 351, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2363, in load_entry_point
return ep.load()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2088, in load
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
File "/usr/lib/python2.7/dist-packages/pip/__init__.py", line 11, in <module>
from pip.vcs import git, mercurial, subversion, bazaar # noqa
File "/usr/lib/python2.7/dist-packages/pip/vcs/mercurial.py", line 9, in <module>
from pip.download import path_to_url
File "/usr/lib/python2.7/dist-packages/pip/download.py", line 25, in <module>
from requests.compat import IncompleteRead
ImportError: cannot import name IncompleteRead
vagrant@ci-slave-4:~$
By installing requiring the exact same version of `requests` as the one
provided by the system under Ubuntu Trusty, Pip no longer needs to
install the `requests` module and `ghtools` will use the system module.
This is also tested under Precise and does not break Pip.
I verified the version of `requests` installed on Ubuntu Trusty prior to
installing `ghtools`:
vagrant@jumpbox-2:~/ghtools$ python
Python 2.7.6 (default, Mar 22 2014, 22:59:56)
[GCC 4.8.2] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import requests
>>> print requests.__version__
2.2.1
>>>
When installing `ghtools`, Pip detects the existing system version and
will not install its own:
vagrant@jumpbox-2:~/ghtools$ sudo pip install .
Unpacking /home/vagrant/ghtools
Running setup.py (path:/tmp/user/0/pip-0GLR1h-build/setup.py) egg_info for package from file:///home/vagrant/ghtools
Requirement already satisfied (use --upgrade to upgrade): requests==2.2.1 in /usr/lib/python2.7/dist-packages (from ghtools==0.21.0)
[...]
For more context, please see this bug report (though the bug is not in
python-pip):
https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1306991
[1]: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=766419
|
import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==2.2.1',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
|
<commit_before>import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==1.1.0',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
<commit_msg>Upgrade requests library to version 2.1.1
Upgrade the `requests` library to match the version provided by Ubuntu
Trusty, version 2.1.1.
This is to prevent a conflict on Ubuntu Trusty between system Python
libraries and Pip libraries.
Specifically, Pip relies on the `IncompleteRead` module that is exported
by `requests.compat`. Version 2.4.3 of the `requests` library removed
that exported module[1].
When `ghtools` is installed, Pip would upgrade `requests` to version
2.4.3 (the latest available), thereby causing Pip to break because the
`requests` module installed by Pip (in
`/usr/lib/python2.7/dist-packages/`) takes precendence over the system
version of that module.
This was causing the following Puppet error on our ci-slave-4 box in Vagrant
using Ubuntu Trusty:
==> ci-slave-4: Error: Could not prefetch package provider 'pip': [nil, nil, [(provider=pip)], nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil]
When I tried to run `pip` on the Vagrant box, I got the following error:
vagrant@ci-slave-4:~$ pip
Traceback (most recent call last):
File "/usr/bin/pip", line 9, in <module>
load_entry_point('pip==1.5.4', 'console_scripts', 'pip')()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 351, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2363, in load_entry_point
return ep.load()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2088, in load
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
File "/usr/lib/python2.7/dist-packages/pip/__init__.py", line 11, in <module>
from pip.vcs import git, mercurial, subversion, bazaar # noqa
File "/usr/lib/python2.7/dist-packages/pip/vcs/mercurial.py", line 9, in <module>
from pip.download import path_to_url
File "/usr/lib/python2.7/dist-packages/pip/download.py", line 25, in <module>
from requests.compat import IncompleteRead
ImportError: cannot import name IncompleteRead
vagrant@ci-slave-4:~$
By installing requiring the exact same version of `requests` as the one
provided by the system under Ubuntu Trusty, Pip no longer needs to
install the `requests` module and `ghtools` will use the system module.
This is also tested under Precise and does not break Pip.
I verified the version of `requests` installed on Ubuntu Trusty prior to
installing `ghtools`:
vagrant@jumpbox-2:~/ghtools$ python
Python 2.7.6 (default, Mar 22 2014, 22:59:56)
[GCC 4.8.2] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import requests
>>> print requests.__version__
2.2.1
>>>
When installing `ghtools`, Pip detects the existing system version and
will not install its own:
vagrant@jumpbox-2:~/ghtools$ sudo pip install .
Unpacking /home/vagrant/ghtools
Running setup.py (path:/tmp/user/0/pip-0GLR1h-build/setup.py) egg_info for package from file:///home/vagrant/ghtools
Requirement already satisfied (use --upgrade to upgrade): requests==2.2.1 in /usr/lib/python2.7/dist-packages (from ghtools==0.21.0)
[...]
For more context, please see this bug report (though the bug is not in
python-pip):
https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1306991
[1]: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=766419<commit_after>
|
import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==2.2.1',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
|
import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==1.1.0',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
Upgrade requests library to version 2.1.1
Upgrade the `requests` library to match the version provided by Ubuntu
Trusty, version 2.1.1.
This is to prevent a conflict on Ubuntu Trusty between system Python
libraries and Pip libraries.
Specifically, Pip relies on the `IncompleteRead` module that is exported
by `requests.compat`. Version 2.4.3 of the `requests` library removed
that exported module[1].
When `ghtools` is installed, Pip would upgrade `requests` to version
2.4.3 (the latest available), thereby causing Pip to break because the
`requests` module installed by Pip (in
`/usr/lib/python2.7/dist-packages/`) takes precendence over the system
version of that module.
This was causing the following Puppet error on our ci-slave-4 box in Vagrant
using Ubuntu Trusty:
==> ci-slave-4: Error: Could not prefetch package provider 'pip': [nil, nil, [(provider=pip)], nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil]
When I tried to run `pip` on the Vagrant box, I got the following error:
vagrant@ci-slave-4:~$ pip
Traceback (most recent call last):
File "/usr/bin/pip", line 9, in <module>
load_entry_point('pip==1.5.4', 'console_scripts', 'pip')()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 351, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2363, in load_entry_point
return ep.load()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2088, in load
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
File "/usr/lib/python2.7/dist-packages/pip/__init__.py", line 11, in <module>
from pip.vcs import git, mercurial, subversion, bazaar # noqa
File "/usr/lib/python2.7/dist-packages/pip/vcs/mercurial.py", line 9, in <module>
from pip.download import path_to_url
File "/usr/lib/python2.7/dist-packages/pip/download.py", line 25, in <module>
from requests.compat import IncompleteRead
ImportError: cannot import name IncompleteRead
vagrant@ci-slave-4:~$
By installing requiring the exact same version of `requests` as the one
provided by the system under Ubuntu Trusty, Pip no longer needs to
install the `requests` module and `ghtools` will use the system module.
This is also tested under Precise and does not break Pip.
I verified the version of `requests` installed on Ubuntu Trusty prior to
installing `ghtools`:
vagrant@jumpbox-2:~/ghtools$ python
Python 2.7.6 (default, Mar 22 2014, 22:59:56)
[GCC 4.8.2] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import requests
>>> print requests.__version__
2.2.1
>>>
When installing `ghtools`, Pip detects the existing system version and
will not install its own:
vagrant@jumpbox-2:~/ghtools$ sudo pip install .
Unpacking /home/vagrant/ghtools
Running setup.py (path:/tmp/user/0/pip-0GLR1h-build/setup.py) egg_info for package from file:///home/vagrant/ghtools
Requirement already satisfied (use --upgrade to upgrade): requests==2.2.1 in /usr/lib/python2.7/dist-packages (from ghtools==0.21.0)
[...]
For more context, please see this bug report (though the bug is not in
python-pip):
https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1306991
[1]: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=766419import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==2.2.1',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
|
<commit_before>import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==1.1.0',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
<commit_msg>Upgrade requests library to version 2.1.1
Upgrade the `requests` library to match the version provided by Ubuntu
Trusty, version 2.1.1.
This is to prevent a conflict on Ubuntu Trusty between system Python
libraries and Pip libraries.
Specifically, Pip relies on the `IncompleteRead` module that is exported
by `requests.compat`. Version 2.4.3 of the `requests` library removed
that exported module[1].
When `ghtools` is installed, Pip would upgrade `requests` to version
2.4.3 (the latest available), thereby causing Pip to break because the
`requests` module installed by Pip (in
`/usr/lib/python2.7/dist-packages/`) takes precendence over the system
version of that module.
This was causing the following Puppet error on our ci-slave-4 box in Vagrant
using Ubuntu Trusty:
==> ci-slave-4: Error: Could not prefetch package provider 'pip': [nil, nil, [(provider=pip)], nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil]
When I tried to run `pip` on the Vagrant box, I got the following error:
vagrant@ci-slave-4:~$ pip
Traceback (most recent call last):
File "/usr/bin/pip", line 9, in <module>
load_entry_point('pip==1.5.4', 'console_scripts', 'pip')()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 351, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2363, in load_entry_point
return ep.load()
File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 2088, in load
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
File "/usr/lib/python2.7/dist-packages/pip/__init__.py", line 11, in <module>
from pip.vcs import git, mercurial, subversion, bazaar # noqa
File "/usr/lib/python2.7/dist-packages/pip/vcs/mercurial.py", line 9, in <module>
from pip.download import path_to_url
File "/usr/lib/python2.7/dist-packages/pip/download.py", line 25, in <module>
from requests.compat import IncompleteRead
ImportError: cannot import name IncompleteRead
vagrant@ci-slave-4:~$
By installing requiring the exact same version of `requests` as the one
provided by the system under Ubuntu Trusty, Pip no longer needs to
install the `requests` module and `ghtools` will use the system module.
This is also tested under Precise and does not break Pip.
I verified the version of `requests` installed on Ubuntu Trusty prior to
installing `ghtools`:
vagrant@jumpbox-2:~/ghtools$ python
Python 2.7.6 (default, Mar 22 2014, 22:59:56)
[GCC 4.8.2] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import requests
>>> print requests.__version__
2.2.1
>>>
When installing `ghtools`, Pip detects the existing system version and
will not install its own:
vagrant@jumpbox-2:~/ghtools$ sudo pip install .
Unpacking /home/vagrant/ghtools
Running setup.py (path:/tmp/user/0/pip-0GLR1h-build/setup.py) egg_info for package from file:///home/vagrant/ghtools
Requirement already satisfied (use --upgrade to upgrade): requests==2.2.1 in /usr/lib/python2.7/dist-packages (from ghtools==0.21.0)
[...]
For more context, please see this bug report (though the bug is not in
python-pip):
https://bugs.launchpad.net/ubuntu/+source/python-pip/+bug/1306991
[1]: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=766419<commit_after>import os
from setuptools import setup, find_packages
from ghtools import __version__
requirements = [
'requests==2.2.1',
'argh==0.23.0'
]
python_scripts = [
'browse',
'list-members',
'login',
'migrate-project',
'migrate-wiki',
'migrate-teams',
'org',
'repo',
'status',
]
HERE = os.path.dirname(__file__)
try:
long_description = open(os.path.join(HERE, 'README.rst')).read()
except:
long_description = None
setup(
name='ghtools',
version=__version__,
packages=find_packages(exclude=['test*']),
# metadata for upload to PyPI
author='Nick Stenning',
author_email='nick@whiteink.com',
maintainer='Government Digital Service',
url='https://github.com/alphagov/ghtools',
description='ghtools: tools for interacting with the GitHub API',
long_description=long_description,
license='MIT',
keywords='sysadmin git github api',
install_requires=requirements,
entry_points={
'console_scripts': [
'gh-{0}=ghtools.command.{1}:main'.format(s, s.replace('-', '_')) for s in python_scripts
]
}
)
|
d4137375513e22e9fda3ad6abb53e99492101727
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = my_package.some_module:main_func',
]
})
|
from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = cmakelists_parsing.parsing:main',
]
})
|
Fix entry point for pretty printing script.
|
Fix entry point for pretty printing script.
|
Python
|
mit
|
ijt/cmakelists_parsing,wjwwood/parse_cmake
|
from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = my_package.some_module:main_func',
]
})
Fix entry point for pretty printing script.
|
from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = cmakelists_parsing.parsing:main',
]
})
|
<commit_before>from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = my_package.some_module:main_func',
]
})
<commit_msg>Fix entry point for pretty printing script.<commit_after>
|
from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = cmakelists_parsing.parsing:main',
]
})
|
from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = my_package.some_module:main_func',
]
})
Fix entry point for pretty printing script.from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = cmakelists_parsing.parsing:main',
]
})
|
<commit_before>from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = my_package.some_module:main_func',
]
})
<commit_msg>Fix entry point for pretty printing script.<commit_after>from setuptools import setup, find_packages
# Prevent "TypeError: 'NoneType' object is not callable" error
# when running python setup.py test
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing
except ImportError:
pass
setup(
name='cmakelists_parsing',
version='0.1',
author='Issac Trotts',
author_email='itrotts@willowgarage.com',
url='http://github.com/ijt/cmakelists_parsing',
description='Parser for CMakeLists.txt files',
packages=find_packages(),
zip_safe=False,
install_requires=['pyPEG2'],
tests_require=['nose'],
test_suite='nose.collector',
include_package_data=True,
entry_points = {
'console_scripts': [
'pprint_cmakelists = cmakelists_parsing.parsing:main',
]
})
|
c48c0a3c032eb92c7e10d42466381f15c643bbe2
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
|
from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
print("WARNING: Did not create pypi readme")
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
|
Add warning if large_description is not generated
|
Add warning if large_description is not generated
|
Python
|
mit
|
khazhyk/osuapi
|
from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
Add warning if large_description is not generated
|
from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
print("WARNING: Did not create pypi readme")
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
|
<commit_before>from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
<commit_msg>Add warning if large_description is not generated<commit_after>
|
from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
print("WARNING: Did not create pypi readme")
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
|
from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
Add warning if large_description is not generatedfrom setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
print("WARNING: Did not create pypi readme")
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
|
<commit_before>from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
<commit_msg>Add warning if large_description is not generated<commit_after>from setuptools import setup, find_packages
import os
from osuapi import __version__ as version, __title__ as name, __author__ as author, __license__ as license
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
import pypandoc
with open('README.md') as readme_md:
readme = pypandoc.convert_text(readme_md.read(), 'rst', 'markdown')
except:
print("WARNING: Did not create pypi readme")
readme = None
setup(
name=name,
version=version,
author=author,
url="https://github.com/khazhyk/osuapi",
license="MIT",
long_description=readme,
keywords="osu",
packages=find_packages(),
description="osu! api wrapper.",
classifiers=[
"Development Status :: 1 - Planning",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Utilities"
]
)
|
d8d62288da0339bb0b5414d18eca8aab24b61238
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.0',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.1',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
|
Bump version number for the next release.
|
Bump version number for the next release.
|
Python
|
bsd-3-clause
|
Sean1708/Regetron3.0,Sean1708/Regetron3.0
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.0',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
Bump version number for the next release.
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.1',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.0',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
<commit_msg>Bump version number for the next release.<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.1',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.0',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
Bump version number for the next release.try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.1',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.0',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
<commit_msg>Bump version number for the next release.<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'regetron is a simple shell for playing with regular expressions',
'author': 'Zed A. Shaw',
'url': 'https://gitorious.org/regetron/regetron',
'download_url': 'http://pypi.python.org/pypi/regetron',
'author_email': 'zedshaw@zedshaw.com',
'version': '1.1',
'install_requires': [],
'packages': ['regetron'],
'scripts': ['bin/regetron'],
'name': 'regetron'
}
setup(**config)
|
462efd1a2ee217b8a70d1769f2fae9265f54fc4f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"click",
"pyyaml"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
Add pyyaml and use lower case lib names
|
Add pyyaml and use lower case lib names
|
Python
|
bsd-2-clause
|
dbaelz/adbons
|
from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
Add pyyaml and use lower case lib names
|
from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"click",
"pyyaml"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Add pyyaml and use lower case lib names<commit_after>
|
from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"click",
"pyyaml"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
Add pyyaml and use lower case lib namesfrom setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"click",
"pyyaml"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"Click"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Add pyyaml and use lower case lib names<commit_after>from setuptools import setup, find_packages
setup(
name="adbons",
version="0.0.1",
author="Daniel Bälz",
author_email="me@dbaelz.de",
description="""A wrapper for the Android adb tool.
It's just adb on steroids""",
license="BSD",
packages=find_packages(),
include_package_data=True,
install_requires=[
"click",
"pyyaml"
],
entry_points={
'console_scripts': ['adbons=src.adbons:cli']
},
classifiers=[
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
1cbf66453e2808e8c15628b41e37e96c93cc77db
|
great_expectations_airflow/hooks/db_hook.py
|
great_expectations_airflow/hooks/db_hook.py
|
from airflow.hooks.dbapi_hook import DbApiHook
import great_expectations as ge
class ExpectationMySQLHook(DbApiHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
|
import great_expectations as ge
from airflow.hooks.mysql_hook import MySqlHook
class ExpectationMySQLHook(MySqlHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
|
Make sure hook can actually be instantiated (generic DbApiHook cannot)
|
Make sure hook can actually be instantiated (generic DbApiHook cannot)
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
from airflow.hooks.dbapi_hook import DbApiHook
import great_expectations as ge
class ExpectationMySQLHook(DbApiHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
Make sure hook can actually be instantiated (generic DbApiHook cannot)
|
import great_expectations as ge
from airflow.hooks.mysql_hook import MySqlHook
class ExpectationMySQLHook(MySqlHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
|
<commit_before>from airflow.hooks.dbapi_hook import DbApiHook
import great_expectations as ge
class ExpectationMySQLHook(DbApiHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
<commit_msg>Make sure hook can actually be instantiated (generic DbApiHook cannot)<commit_after>
|
import great_expectations as ge
from airflow.hooks.mysql_hook import MySqlHook
class ExpectationMySQLHook(MySqlHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
|
from airflow.hooks.dbapi_hook import DbApiHook
import great_expectations as ge
class ExpectationMySQLHook(DbApiHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
Make sure hook can actually be instantiated (generic DbApiHook cannot)import great_expectations as ge
from airflow.hooks.mysql_hook import MySqlHook
class ExpectationMySQLHook(MySqlHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
|
<commit_before>from airflow.hooks.dbapi_hook import DbApiHook
import great_expectations as ge
class ExpectationMySQLHook(DbApiHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
<commit_msg>Make sure hook can actually be instantiated (generic DbApiHook cannot)<commit_after>import great_expectations as ge
from airflow.hooks.mysql_hook import MySqlHook
class ExpectationMySQLHook(MySqlHook):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def get_ge_df(self, dataset_name, **kwargs):
self.log.info("Connecting to dataset {dataset} on {uri}".format(uri=self.get_uri(), dataset=dataset_name))
sql_context = ge.get_data_context('SqlAlchemy', self.get_uri())
return sql_context.get_dataset(dataset_name=dataset_name, **kwargs)
|
3443a7355888a9a4fbd6b9de8c8f84c88643e5af
|
confluent_client/setup.py
|
confluent_client/setup.py
|
from setuptools import setup
setup(
name='confluent_client',
version='0.1.1',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.1'],
scripts=['bin/confetty'],
)
|
from setuptools import setup
setup(
name='confluent_client',
version='0.1.2',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.0'],
scripts=['bin/confetty'],
)
|
Refresh package data to assure better pyghmi and confetty
|
Refresh package data to assure better pyghmi and confetty
|
Python
|
apache-2.0
|
jufm/confluent,chenglch/confluent,whowutwut/confluent,jjohnson42/confluent,xcat2/confluent,chenglch/confluent,whowutwut/confluent,michaelfardu/thinkconfluent,whowutwut/confluent,michaelfardu/thinkconfluent,jufm/confluent,jufm/confluent,xcat2/confluent,michaelfardu/thinkconfluent,chenglch/confluent,chenglch/confluent,jjohnson42/confluent,chenglch/confluent,jufm/confluent,jufm/confluent,michaelfardu/thinkconfluent,jjohnson42/confluent,whowutwut/confluent,jjohnson42/confluent,xcat2/confluent,jjohnson42/confluent,xcat2/confluent,xcat2/confluent,michaelfardu/thinkconfluent
|
from setuptools import setup
setup(
name='confluent_client',
version='0.1.1',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.1'],
scripts=['bin/confetty'],
)
Refresh package data to assure better pyghmi and confetty
|
from setuptools import setup
setup(
name='confluent_client',
version='0.1.2',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.0'],
scripts=['bin/confetty'],
)
|
<commit_before>from setuptools import setup
setup(
name='confluent_client',
version='0.1.1',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.1'],
scripts=['bin/confetty'],
)
<commit_msg>Refresh package data to assure better pyghmi and confetty<commit_after>
|
from setuptools import setup
setup(
name='confluent_client',
version='0.1.2',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.0'],
scripts=['bin/confetty'],
)
|
from setuptools import setup
setup(
name='confluent_client',
version='0.1.1',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.1'],
scripts=['bin/confetty'],
)
Refresh package data to assure better pyghmi and confettyfrom setuptools import setup
setup(
name='confluent_client',
version='0.1.2',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.0'],
scripts=['bin/confetty'],
)
|
<commit_before>from setuptools import setup
setup(
name='confluent_client',
version='0.1.1',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.1'],
scripts=['bin/confetty'],
)
<commit_msg>Refresh package data to assure better pyghmi and confetty<commit_after>from setuptools import setup
setup(
name='confluent_client',
version='0.1.2',
author='Jarrod Johnson',
author_email='jbjohnso@us.ibm.com',
url='http://xcat.sf.net/',
packages=['confluent'],
install_requires=['confluent_common>=0.1.0'],
scripts=['bin/confetty'],
)
|
e68aaef747f5a2ced06f74249d49d0ed81551d23
|
test/test_functionality.py
|
test/test_functionality.py
|
"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys):
"""
Check simple audit gives the expected output
"""
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
|
"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys, monkeypatch):
"""
Check simple audit gives the expected output
"""
def mockreturn(path):
return '9c3bb3efa8095f36aafd9bf3a698efe439505021'
monkeypatch.setattr(toolaudit.readers, 'sha1_file', mockreturn)
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
|
Use mocking to fix the result of hashing files
|
Use mocking to fix the result of hashing files
|
Python
|
mit
|
jstutters/toolaudit
|
"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys):
"""
Check simple audit gives the expected output
"""
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
Use mocking to fix the result of hashing files
|
"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys, monkeypatch):
"""
Check simple audit gives the expected output
"""
def mockreturn(path):
return '9c3bb3efa8095f36aafd9bf3a698efe439505021'
monkeypatch.setattr(toolaudit.readers, 'sha1_file', mockreturn)
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
|
<commit_before>"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys):
"""
Check simple audit gives the expected output
"""
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
<commit_msg>Use mocking to fix the result of hashing files<commit_after>
|
"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys, monkeypatch):
"""
Check simple audit gives the expected output
"""
def mockreturn(path):
return '9c3bb3efa8095f36aafd9bf3a698efe439505021'
monkeypatch.setattr(toolaudit.readers, 'sha1_file', mockreturn)
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
|
"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys):
"""
Check simple audit gives the expected output
"""
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
Use mocking to fix the result of hashing files"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys, monkeypatch):
"""
Check simple audit gives the expected output
"""
def mockreturn(path):
return '9c3bb3efa8095f36aafd9bf3a698efe439505021'
monkeypatch.setattr(toolaudit.readers, 'sha1_file', mockreturn)
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
|
<commit_before>"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys):
"""
Check simple audit gives the expected output
"""
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
<commit_msg>Use mocking to fix the result of hashing files<commit_after>"""
Tests of overall functionality
"""
import pytest
import toolaudit
import yaml
def test_simple_audit(capsys, monkeypatch):
"""
Check simple audit gives the expected output
"""
def mockreturn(path):
return '9c3bb3efa8095f36aafd9bf3a698efe439505021'
monkeypatch.setattr(toolaudit.readers, 'sha1_file', mockreturn)
app = toolaudit.application.ToolauditApp()
try:
app.run(kitlist_file='test/example.yaml')
except SystemExit:
pass
out, err = capsys.readouterr()
returned_yaml = yaml.load(out)
assert returned_yaml['tools'][0]['checksum'] == '9c3bb3efa8095f36aafd9bf3a698efe439505021'
|
6de9b3215ac9d3a2b5dc97af5e5fe02886d4bfe1
|
pywikibot/families/wikitech_family.py
|
pywikibot/families/wikitech_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
Remove overide of default scriptpath
|
Remove overide of default scriptpath
|
Python
|
mit
|
pywikibot/core-migration-example
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
Remove overide of default scriptpath
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
<commit_msg>Remove overide of default scriptpath<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
Remove overide of default scriptpath# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
def scriptpath(self, code):
return ''
<commit_msg>Remove overide of default scriptpath<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikitech family
class Family(family.Family):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikitech'
self.langs = {
'en': 'wikitech.wikimedia.org',
}
def version(self, code):
return '1.21wmf8'
|
4a032ece106d4b3b3764420197453afd33475bf6
|
donut/modules/permissions/helpers.py
|
donut/modules/permissions/helpers.py
|
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
|
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
|
Fix failing test and make lint
|
Fix failing test and make lint
|
Python
|
mit
|
ASCIT/donut-python,ASCIT/donut,ASCIT/donut,ASCIT/donut-python,ASCIT/donut
|
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
Fix failing test and make lint
|
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
|
<commit_before>import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
<commit_msg>Fix failing test and make lint<commit_after>
|
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
|
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
Fix failing test and make lintimport flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
|
<commit_before>import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
<commit_msg>Fix failing test and make lint<commit_after>import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
|
dd5a292320f657a4b5e776c6e0d99fad5916e6e6
|
source/fiblist/conf/urls.py
|
source/fiblist/conf/urls.py
|
# Root URL configuration: maps the site root to the home page, delegates
# /lists/ to the lists app, and exposes the Django admin interface.
from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# NOTE(review): custom_server_error is only referenced from the
# commented-out pattern below — this import looks unused; confirm.
from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
|
# Root URL configuration: maps the site root to the home page, delegates
# /lists/ to the lists app, and exposes the Django admin interface.
from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
|
Remove custom server error view temporarily.
|
Remove custom server error view temporarily.
|
Python
|
unlicense
|
nicorellius/fiblist,nicorellius/fiblist
|
# Root URL configuration: maps the site root to the home page, delegates
# /lists/ to the lists app, and exposes the Django admin interface.
from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# NOTE(review): custom_server_error is only referenced from the
# commented-out pattern below — this import looks unused; confirm.
from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
Remove custom server error view temporarily.
|
# Root URL configuration: maps the site root to the home page, delegates
# /lists/ to the lists app, and exposes the Django admin interface.
from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# NOTE(review): custom_server_error is only referenced from the
# commented-out pattern below — this import looks unused; confirm.
from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
# Root URL configuration: home page, the lists app, and the Django admin.
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
<commit_msg>Remove custom server error view temporarily.<commit_after>
|
# Root URL configuration: maps the site root to the home page, delegates
# /lists/ to the lists app, and exposes the Django admin interface.
from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
|
# Root URL configuration: maps the site root to the home page, delegates
# /lists/ to the lists app, and exposes the Django admin interface.
from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# NOTE(review): custom_server_error is only referenced from the
# commented-out pattern below — this import looks unused; confirm.
from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
Remove custom server error view temporarily.from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
# Root URL configuration: home page, the lists app, and the Django admin.
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# NOTE(review): custom_server_error is only referenced from the
# commented-out pattern below — this import looks unused; confirm.
from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
# Root URL configuration: home page, the lists app, and the Django admin.
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
<commit_msg>Remove custom server error view temporarily.<commit_after>from django.conf.urls import include, url
from django.contrib import admin
from lists.views import home_page
# from core.views import custom_server_error
# error pages (custom handlers currently disabled)
# handler400 = 'core.views.custom_bad_request'
# handler403 = 'core.views.custom_permission_denied'
# handler404 = 'core.views.custom_page_not_found'
# handler500 = 'core.views.custom_server_error'
# handler502 = 'core.views.custom_bad_gateway'
# Root URL configuration: home page, the lists app, and the Django admin.
urlpatterns = [
    url(r'^$', home_page, name='home'),
    url(r'^lists/', include('lists.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # url(r'^(\d)/$', custom_server_error, name='custom-server-error'),
]
|
30ffff16e5dd4eec6e5128a277a677834470be73
|
scikits/learn/tests/test_cross_val.py
|
scikits/learn/tests/test_cross_val.py
|
""" Test the cross_val module
"""
import numpy as np
import nose
from .. import cross_val
def test_kfold():
    """Constructing a (Stratified)KFold with too few samples must fail."""
    # Plain KFold: more folds than samples should be rejected.
    nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
    # StratifiedKFold: labels too sparse for the requested number of folds.
    labels = [0, 0, 1, 1, 2]
    nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, labels, 3)
|
""" Test the cross_val module
"""
import numpy as np
import nose
from ..base import BaseEstimator
from .. import cross_val
class MockClassifier(BaseEstimator):
    """Trivial estimator used to smoke-test the cross-validation helpers."""

    def __init__(self, a=0):
        self.a = a

    def fit(self, X, Y, **params):
        # Let BaseEstimator handle any fit params, then allow chaining.
        self._set_params(**params)
        return self

    def predict(self, T):
        # Stand-in prediction: just report the number of samples received.
        return T.shape[0]

    def score(self, X=None, Y=None):
        # Deterministic value in (0, 1] that ignores the data entirely:
        # largest at a == 0 and decaying as |a| grows.
        return 1. / (1 + np.abs(self.a))
# Shared fixtures for the tests below: constant features and range-based
# targets. NOTE(review): arange(10)/2 floor-divides under Python 2 but
# yields floats under Python 3 — confirm which is intended.
X = np.ones((10, 2))
y = np.arange(10)/2
################################################################################
# Tests
def test_kfold():
    # Check that an error is raised when there are not enough samples
    # for the requested number of folds.
    nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
    y = [0, 0, 1, 1, 2]
    nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
def test_cross_val_score():
    """Smoke-test cross_val_score against MockClassifier's fixed score."""
    clf = MockClassifier()
    for value in range(-10, 10):
        clf.a = value
        scores = cross_val.cross_val_score(clf, X, y)
        # Every fold must report exactly the classifier's deterministic score.
        np.testing.assert_array_equal(scores, clf.score(X, y))
|
Add a smoke test for cross_val_score
|
TEST: Add a smoke test for cross_val_score
|
Python
|
bsd-3-clause
|
mjgrav2001/scikit-learn,marcocaccin/scikit-learn,jmschrei/scikit-learn,ycaihua/scikit-learn,sarahgrogan/scikit-learn,xubenben/scikit-learn,appapantula/scikit-learn,mehdidc/scikit-learn,bikong2/scikit-learn,cainiaocome/scikit-learn,hsiaoyi0504/scikit-learn,Titan-C/scikit-learn,hsuantien/scikit-learn,kmike/scikit-learn,Vimos/scikit-learn,pythonvietnam/scikit-learn,AlexanderFabisch/scikit-learn,madjelan/scikit-learn,hsiaoyi0504/scikit-learn,ycaihua/scikit-learn,madjelan/scikit-learn,mhue/scikit-learn,qifeigit/scikit-learn,RPGOne/scikit-learn,tmhm/scikit-learn,jseabold/scikit-learn,alvarofierroclavero/scikit-learn,thientu/scikit-learn,dingocuster/scikit-learn,yonglehou/scikit-learn,dingocuster/scikit-learn,massmutual/scikit-learn,rvraghav93/scikit-learn,pypot/scikit-learn,hainm/scikit-learn,giorgiop/scikit-learn,rsivapr/scikit-learn,vortex-ape/scikit-learn,bikong2/scikit-learn,sergeyf/scikit-learn,vybstat/scikit-learn,ltiao/scikit-learn,schets/scikit-learn,lenovor/scikit-learn,Lawrence-Liu/scikit-learn,mjudsp/Tsallis,moutai/scikit-learn,ElDeveloper/scikit-learn,xwolf12/scikit-learn,ChanderG/scikit-learn,Windy-Ground/scikit-learn,devanshdalal/scikit-learn,zorojean/scikit-learn,deepesch/scikit-learn,chrisburr/scikit-learn,chrisburr/scikit-learn,loli/sklearn-ensembletrees,luo66/scikit-learn,pompiduskus/scikit-learn,pompiduskus/scikit-learn,jorik041/scikit-learn,lin-credible/scikit-learn,plissonf/scikit-learn,alexeyum/scikit-learn,trankmichael/scikit-learn,nomadcube/scikit-learn,AlexRobson/scikit-learn,belltailjp/scikit-learn,ivannz/scikit-learn,PatrickChrist/scikit-learn,cainiaocome/scikit-learn,rishikksh20/scikit-learn,marcocaccin/scikit-learn,jpautom/scikit-learn,fbagirov/scikit-learn,JsNoNo/scikit-learn,NunoEdgarGub1/scikit-learn,zorojean/scikit-learn,potash/scikit-learn,hdmetor/scikit-learn,alexsavio/scikit-learn,phdowling/scikit-learn,cwu2011/scikit-learn,costypetrisor/scikit-learn,carrillo/scikit-learn,hdmetor/scikit-learn,zaxtax/scikit-learn,chrsrds/scikit-learn,vin
ayak-mehta/scikit-learn,lenovor/scikit-learn,nomadcube/scikit-learn,moutai/scikit-learn,LiaoPan/scikit-learn,cainiaocome/scikit-learn,cwu2011/scikit-learn,HolgerPeters/scikit-learn,liyu1990/sklearn,henrykironde/scikit-learn,xyguo/scikit-learn,NelisVerhoef/scikit-learn,Nyker510/scikit-learn,costypetrisor/scikit-learn,hdmetor/scikit-learn,rohanp/scikit-learn,AlexRobson/scikit-learn,tomlof/scikit-learn,YinongLong/scikit-learn,eickenberg/scikit-learn,ChanChiChoi/scikit-learn,jseabold/scikit-learn,fabioticconi/scikit-learn,mwv/scikit-learn,krez13/scikit-learn,lbishal/scikit-learn,MatthieuBizien/scikit-learn,lucidfrontier45/scikit-learn,simon-pepin/scikit-learn,phdowling/scikit-learn,moutai/scikit-learn,YinongLong/scikit-learn,bhargav/scikit-learn,sgenoud/scikit-learn,xzh86/scikit-learn,abhishekkrthakur/scikit-learn,herilalaina/scikit-learn,robin-lai/scikit-learn,yonglehou/scikit-learn,cwu2011/scikit-learn,jkarnows/scikit-learn,sergeyf/scikit-learn,xubenben/scikit-learn,nelson-liu/scikit-learn,trungnt13/scikit-learn,jayflo/scikit-learn,ningchi/scikit-learn,Windy-Ground/scikit-learn,samzhang111/scikit-learn,RachitKansal/scikit-learn,mxjl620/scikit-learn,ominux/scikit-learn,ZenDevelopmentSystems/scikit-learn,russel1237/scikit-learn,btabibian/scikit-learn,lbishal/scikit-learn,ChanChiChoi/scikit-learn,xiaoxiamii/scikit-learn,3manuek/scikit-learn,icdishb/scikit-learn,yunfeilu/scikit-learn,rexshihaoren/scikit-learn,rsivapr/scikit-learn,DonBeo/scikit-learn,MohammedWasim/scikit-learn,khkaminska/scikit-learn,bikong2/scikit-learn,smartscheduling/scikit-learn-categorical-tree,liyu1990/sklearn,zorroblue/scikit-learn,alvarofierroclavero/scikit-learn,aflaxman/scikit-learn,xavierwu/scikit-learn,jkarnows/scikit-learn,mwv/scikit-learn,mattgiguere/scikit-learn,liberatorqjw/scikit-learn,beepee14/scikit-learn,jm-begon/scikit-learn,mhdella/scikit-learn,Garrett-R/scikit-learn,zaxtax/scikit-learn,shahankhatch/scikit-learn,altairpearl/scikit-learn,idlead/scikit-learn,jpautom/scikit-learn,aabadie
/scikit-learn,henrykironde/scikit-learn,kevin-intel/scikit-learn,Jimmy-Morzaria/scikit-learn,saiwing-yeung/scikit-learn,betatim/scikit-learn,sanketloke/scikit-learn,arabenjamin/scikit-learn,yunfeilu/scikit-learn,florian-f/sklearn,kevin-intel/scikit-learn,ominux/scikit-learn,andaag/scikit-learn,zuku1985/scikit-learn,BiaDarkia/scikit-learn,huzq/scikit-learn,huobaowangxi/scikit-learn,RomainBrault/scikit-learn,AlexanderFabisch/scikit-learn,amueller/scikit-learn,manhhomienbienthuy/scikit-learn,walterreade/scikit-learn,MartinDelzant/scikit-learn,nesterione/scikit-learn,murali-munna/scikit-learn,HolgerPeters/scikit-learn,iismd17/scikit-learn,ilyes14/scikit-learn,ahoyosid/scikit-learn,shangwuhencc/scikit-learn,simon-pepin/scikit-learn,q1ang/scikit-learn,roxyboy/scikit-learn,ClimbsRocks/scikit-learn,adamgreenhall/scikit-learn,0asa/scikit-learn,yonglehou/scikit-learn,liangz0707/scikit-learn,lin-credible/scikit-learn,mjgrav2001/scikit-learn,ZenDevelopmentSystems/scikit-learn,raghavrv/scikit-learn,mugizico/scikit-learn,h2educ/scikit-learn,btabibian/scikit-learn,jorge2703/scikit-learn,russel1237/scikit-learn,MartinDelzant/scikit-learn,nmayorov/scikit-learn,mayblue9/scikit-learn,shikhardb/scikit-learn,ashhher3/scikit-learn,stylianos-kampakis/scikit-learn,jorik041/scikit-learn,vibhorag/scikit-learn,kagayakidan/scikit-learn,ngoix/OCRF,sonnyhu/scikit-learn,manhhomienbienthuy/scikit-learn,joshloyal/scikit-learn,harshaneelhg/scikit-learn,kmike/scikit-learn,Sentient07/scikit-learn,xuewei4d/scikit-learn,waterponey/scikit-learn,olologin/scikit-learn,tomlof/scikit-learn,dhruv13J/scikit-learn,treycausey/scikit-learn,ankurankan/scikit-learn,466152112/scikit-learn,evgchz/scikit-learn,khkaminska/scikit-learn,jmetzen/scikit-learn,ilo10/scikit-learn,mayblue9/scikit-learn,jorge2703/scikit-learn,idlead/scikit-learn,mojoboss/scikit-learn,bhargav/scikit-learn,manashmndl/scikit-learn,pythonvietnam/scikit-learn,aetilley/scikit-learn,CVML/scikit-learn,jakirkham/scikit-learn,shenzebang/scikit-learn,mly
undin/scikit-learn,ilyes14/scikit-learn,3manuek/scikit-learn,ishanic/scikit-learn,terkkila/scikit-learn,ky822/scikit-learn,huzq/scikit-learn,jjx02230808/project0223,btabibian/scikit-learn,yunfeilu/scikit-learn,OshynSong/scikit-learn,huobaowangxi/scikit-learn,clemkoa/scikit-learn,jaidevd/scikit-learn,glennq/scikit-learn,UNR-AERIAL/scikit-learn,joshloyal/scikit-learn,rishikksh20/scikit-learn,tosolveit/scikit-learn,schets/scikit-learn,mattilyra/scikit-learn,vybstat/scikit-learn,nvoron23/scikit-learn,jorge2703/scikit-learn,beepee14/scikit-learn,Akshay0724/scikit-learn,evgchz/scikit-learn,ngoix/OCRF,yanlend/scikit-learn,xuewei4d/scikit-learn,aflaxman/scikit-learn,PatrickChrist/scikit-learn,Akshay0724/scikit-learn,loli/semisupervisedforests,AlexandreAbraham/scikit-learn,kashif/scikit-learn,massmutual/scikit-learn,DSLituiev/scikit-learn,mjgrav2001/scikit-learn,hlin117/scikit-learn,IndraVikas/scikit-learn,MartinSavc/scikit-learn,OshynSong/scikit-learn,bikong2/scikit-learn,xubenben/scikit-learn,anntzer/scikit-learn,ningchi/scikit-learn,3manuek/scikit-learn,fengzhyuan/scikit-learn,mattgiguere/scikit-learn,cybernet14/scikit-learn,dsullivan7/scikit-learn,shahankhatch/scikit-learn,aminert/scikit-learn,amueller/scikit-learn,sinhrks/scikit-learn,robin-lai/scikit-learn,UNR-AERIAL/scikit-learn,mlyundin/scikit-learn,bthirion/scikit-learn,pompiduskus/scikit-learn,spallavolu/scikit-learn,sonnyhu/scikit-learn,mattilyra/scikit-learn,murali-munna/scikit-learn,B3AU/waveTree,LohithBlaze/scikit-learn,huobaowangxi/scikit-learn,Sentient07/scikit-learn,yanlend/scikit-learn,JPFrancoia/scikit-learn,harshaneelhg/scikit-learn,rohanp/scikit-learn,petosegan/scikit-learn,Titan-C/scikit-learn,h2educ/scikit-learn,andrewnc/scikit-learn,equialgo/scikit-learn,nesterione/scikit-learn,sarahgrogan/scikit-learn,Garrett-R/scikit-learn,gotomypc/scikit-learn,dhruv13J/scikit-learn,Garrett-R/scikit-learn,rrohan/scikit-learn,jakirkham/scikit-learn,hainm/scikit-learn,BiaDarkia/scikit-learn,qifeigit/scikit-learn,andre
wnc/scikit-learn,meduz/scikit-learn,wazeerzulfikar/scikit-learn,olologin/scikit-learn,jereze/scikit-learn,meduz/scikit-learn,vybstat/scikit-learn,shusenl/scikit-learn,ycaihua/scikit-learn,tomlof/scikit-learn,nikitasingh981/scikit-learn,imaculate/scikit-learn,larsmans/scikit-learn,appapantula/scikit-learn,mhue/scikit-learn,NelisVerhoef/scikit-learn,jpautom/scikit-learn,ElDeveloper/scikit-learn,massmutual/scikit-learn,vivekmishra1991/scikit-learn,ChanChiChoi/scikit-learn,marcocaccin/scikit-learn,akionakamura/scikit-learn,hsuantien/scikit-learn,glennq/scikit-learn,mattilyra/scikit-learn,themrmax/scikit-learn,henridwyer/scikit-learn,bthirion/scikit-learn,lucidfrontier45/scikit-learn,depet/scikit-learn,jorik041/scikit-learn,mfjb/scikit-learn,procoder317/scikit-learn,kaichogami/scikit-learn,untom/scikit-learn,Srisai85/scikit-learn,liyu1990/sklearn,pnedunuri/scikit-learn,jkarnows/scikit-learn,liberatorqjw/scikit-learn,nrhine1/scikit-learn,joernhees/scikit-learn,MohammedWasim/scikit-learn,waterponey/scikit-learn,ominux/scikit-learn,idlead/scikit-learn,andaag/scikit-learn,loli/semisupervisedforests,raghavrv/scikit-learn,frank-tancf/scikit-learn,Jimmy-Morzaria/scikit-learn,abhishekkrthakur/scikit-learn,jmetzen/scikit-learn,maheshakya/scikit-learn,Nyker510/scikit-learn,Myasuka/scikit-learn,abhishekkrthakur/scikit-learn,ephes/scikit-learn,ssaeger/scikit-learn,pv/scikit-learn,anurag313/scikit-learn,cl4rke/scikit-learn,sinhrks/scikit-learn,rrohan/scikit-learn,plissonf/scikit-learn,akionakamura/scikit-learn,vshtanko/scikit-learn,BiaDarkia/scikit-learn,billy-inn/scikit-learn,vybstat/scikit-learn,cauchycui/scikit-learn,glennq/scikit-learn,tdhopper/scikit-learn,thilbern/scikit-learn,florian-f/sklearn,arjoly/scikit-learn,sgenoud/scikit-learn,hsiaoyi0504/scikit-learn,JPFrancoia/scikit-learn,deepesch/scikit-learn,jlegendary/scikit-learn,larsmans/scikit-learn,thientu/scikit-learn,kagayakidan/scikit-learn,B3AU/waveTree,potash/scikit-learn,arabenjamin/scikit-learn,devanshdalal/scikit-learn
,jmschrei/scikit-learn,Achuth17/scikit-learn,JosmanPS/scikit-learn,Fireblend/scikit-learn,sumspr/scikit-learn,samuel1208/scikit-learn,aflaxman/scikit-learn,rahul-c1/scikit-learn,RachitKansal/scikit-learn,ssaeger/scikit-learn,xuewei4d/scikit-learn,clemkoa/scikit-learn,zuku1985/scikit-learn,LiaoPan/scikit-learn,DSLituiev/scikit-learn,dingocuster/scikit-learn,arahuja/scikit-learn,anurag313/scikit-learn,icdishb/scikit-learn,vermouthmjl/scikit-learn,OshynSong/scikit-learn,victorbergelin/scikit-learn,ahoyosid/scikit-learn,rsivapr/scikit-learn,djgagne/scikit-learn,anirudhjayaraman/scikit-learn,rvraghav93/scikit-learn,mojoboss/scikit-learn,cauchycui/scikit-learn,kaichogami/scikit-learn,larsmans/scikit-learn,kevin-intel/scikit-learn,YinongLong/scikit-learn,xzh86/scikit-learn,ndingwall/scikit-learn,Djabbz/scikit-learn,MartinDelzant/scikit-learn,nrhine1/scikit-learn,xiaoxiamii/scikit-learn,cauchycui/scikit-learn,tawsifkhan/scikit-learn,pratapvardhan/scikit-learn,anntzer/scikit-learn,ningchi/scikit-learn,roxyboy/scikit-learn,ilo10/scikit-learn,ashhher3/scikit-learn,thilbern/scikit-learn,pypot/scikit-learn,liyu1990/sklearn,fabianp/scikit-learn,manashmndl/scikit-learn,belltailjp/scikit-learn,yunfeilu/scikit-learn,TomDLT/scikit-learn,kjung/scikit-learn,henridwyer/scikit-learn,simon-pepin/scikit-learn,plissonf/scikit-learn,mrshu/scikit-learn,imaculate/scikit-learn,mhue/scikit-learn,untom/scikit-learn,wlamond/scikit-learn,fbagirov/scikit-learn,andaag/scikit-learn,russel1237/scikit-learn,depet/scikit-learn,3manuek/scikit-learn,davidgbe/scikit-learn,tmhm/scikit-learn,voxlol/scikit-learn,jlegendary/scikit-learn,Fireblend/scikit-learn,samzhang111/scikit-learn,madjelan/scikit-learn,mhdella/scikit-learn,jseabold/scikit-learn,trankmichael/scikit-learn,aabadie/scikit-learn,rajat1994/scikit-learn,scikit-learn/scikit-learn,spallavolu/scikit-learn,manhhomienbienthuy/scikit-learn,michigraber/scikit-learn,sinhrks/scikit-learn,kylerbrown/scikit-learn,ClimbsRocks/scikit-learn,arabenjamin/scikit-le
arn,fredhusser/scikit-learn,vinayak-mehta/scikit-learn,AIML/scikit-learn,heli522/scikit-learn,shangwuhencc/scikit-learn,TomDLT/scikit-learn,siutanwong/scikit-learn,Djabbz/scikit-learn,Nyker510/scikit-learn,lazywei/scikit-learn,michigraber/scikit-learn,espg/scikit-learn,Vimos/scikit-learn,shenzebang/scikit-learn,samuel1208/scikit-learn,Fireblend/scikit-learn,smartscheduling/scikit-learn-categorical-tree,nmayorov/scikit-learn,glouppe/scikit-learn,vinayak-mehta/scikit-learn,ashhher3/scikit-learn,pnedunuri/scikit-learn,DonBeo/scikit-learn,nhejazi/scikit-learn,heli522/scikit-learn,Achuth17/scikit-learn,sanketloke/scikit-learn,toastedcornflakes/scikit-learn,untom/scikit-learn,hitszxp/scikit-learn,mjgrav2001/scikit-learn,qifeigit/scikit-learn,fzalkow/scikit-learn,nesterione/scikit-learn,cdegroc/scikit-learn,shyamalschandra/scikit-learn,jmschrei/scikit-learn,Jimmy-Morzaria/scikit-learn,justincassidy/scikit-learn,joshloyal/scikit-learn,IssamLaradji/scikit-learn,treycausey/scikit-learn,fabianp/scikit-learn,jakobworldpeace/scikit-learn,JeanKossaifi/scikit-learn,Jimmy-Morzaria/scikit-learn,JPFrancoia/scikit-learn,NelisVerhoef/scikit-learn,adamgreenhall/scikit-learn,carrillo/scikit-learn,JeanKossaifi/scikit-learn,jzt5132/scikit-learn,costypetrisor/scikit-learn,schets/scikit-learn,shyamalschandra/scikit-learn,jaidevd/scikit-learn,zhenv5/scikit-learn,manashmndl/scikit-learn,robbymeals/scikit-learn,ivannz/scikit-learn,Srisai85/scikit-learn,hrjn/scikit-learn,fredhusser/scikit-learn,fyffyt/scikit-learn,AIML/scikit-learn,RomainBrault/scikit-learn,AlexanderFabisch/scikit-learn,kagayakidan/scikit-learn,CVML/scikit-learn,aminert/scikit-learn,fabianp/scikit-learn,Clyde-fare/scikit-learn,manhhomienbienthuy/scikit-learn,mxjl620/scikit-learn,Srisai85/scikit-learn,ominux/scikit-learn,zihua/scikit-learn,hugobowne/scikit-learn,ilo10/scikit-learn,smartscheduling/scikit-learn-categorical-tree,JosmanPS/scikit-learn,mattgiguere/scikit-learn,krez13/scikit-learn,nesterione/scikit-learn,cl4rke/scikit-
learn,maheshakya/scikit-learn,fredhusser/scikit-learn,glemaitre/scikit-learn,waterponey/scikit-learn,IshankGulati/scikit-learn,ilo10/scikit-learn,ycaihua/scikit-learn,hitszxp/scikit-learn,smartscheduling/scikit-learn-categorical-tree,pratapvardhan/scikit-learn,shangwuhencc/scikit-learn,poryfly/scikit-learn,ky822/scikit-learn,0asa/scikit-learn,mblondel/scikit-learn,vivekmishra1991/scikit-learn,pianomania/scikit-learn,espg/scikit-learn,dsquareindia/scikit-learn,poryfly/scikit-learn,trungnt13/scikit-learn,justincassidy/scikit-learn,sarahgrogan/scikit-learn,florian-f/sklearn,TomDLT/scikit-learn,walterreade/scikit-learn,NunoEdgarGub1/scikit-learn,jakirkham/scikit-learn,voxlol/scikit-learn,anirudhjayaraman/scikit-learn,terkkila/scikit-learn,RomainBrault/scikit-learn,joshloyal/scikit-learn,beepee14/scikit-learn,idlead/scikit-learn,glemaitre/scikit-learn,vshtanko/scikit-learn,nelson-liu/scikit-learn,wlamond/scikit-learn,Aasmi/scikit-learn,jakobworldpeace/scikit-learn,aabadie/scikit-learn,belltailjp/scikit-learn,ngoix/OCRF,mattilyra/scikit-learn,arahuja/scikit-learn,rahuldhote/scikit-learn,jayflo/scikit-learn,samuel1208/scikit-learn,wzbozon/scikit-learn,simon-pepin/scikit-learn,thilbern/scikit-learn,glemaitre/scikit-learn,fabioticconi/scikit-learn,espg/scikit-learn,huzq/scikit-learn,AIML/scikit-learn,macks22/scikit-learn,ngoix/OCRF,Sentient07/scikit-learn,billy-inn/scikit-learn,lesteve/scikit-learn,xzh86/scikit-learn,0x0all/scikit-learn,mrshu/scikit-learn,jlegendary/scikit-learn,jayflo/scikit-learn,tomlof/scikit-learn,chrisburr/scikit-learn,nrhine1/scikit-learn,AnasGhrab/scikit-learn,belltailjp/scikit-learn,kmike/scikit-learn,aetilley/scikit-learn,Obus/scikit-learn,mxjl620/scikit-learn,evgchz/scikit-learn,shyamalschandra/scikit-learn,elkingtonmcb/scikit-learn,CforED/Machine-Learning,jjx02230808/project0223,IssamLaradji/scikit-learn,tosolveit/scikit-learn,andrewnc/scikit-learn,Myasuka/scikit-learn,deepesch/scikit-learn,Myasuka/scikit-learn,mikebenfield/scikit-learn,herilalain
a/scikit-learn,AlexandreAbraham/scikit-learn,Barmaley-exe/scikit-learn,f3r/scikit-learn,ldirer/scikit-learn,dsullivan7/scikit-learn,zorroblue/scikit-learn,yask123/scikit-learn,lucidfrontier45/scikit-learn,hlin117/scikit-learn,justincassidy/scikit-learn,moutai/scikit-learn,pypot/scikit-learn,yanlend/scikit-learn,Clyde-fare/scikit-learn,mwv/scikit-learn,ZENGXH/scikit-learn,xwolf12/scikit-learn,murali-munna/scikit-learn,mlyundin/scikit-learn,rajat1994/scikit-learn,jblackburne/scikit-learn,Achuth17/scikit-learn,saiwing-yeung/scikit-learn,phdowling/scikit-learn,samuel1208/scikit-learn,devanshdalal/scikit-learn,yyjiang/scikit-learn,cybernet14/scikit-learn,TomDLT/scikit-learn,zuku1985/scikit-learn,mehdidc/scikit-learn,Adai0808/scikit-learn,joernhees/scikit-learn,hsiaoyi0504/scikit-learn,nmayorov/scikit-learn,murali-munna/scikit-learn,pythonvietnam/scikit-learn,ldirer/scikit-learn,victorbergelin/scikit-learn,sgenoud/scikit-learn,vigilv/scikit-learn,altairpearl/scikit-learn,mojoboss/scikit-learn,elkingtonmcb/scikit-learn,Obus/scikit-learn,rahul-c1/scikit-learn,AlexanderFabisch/scikit-learn,IndraVikas/scikit-learn,JeanKossaifi/scikit-learn,BiaDarkia/scikit-learn,walterreade/scikit-learn,huzq/scikit-learn,ycaihua/scikit-learn,cdegroc/scikit-learn,nelson-liu/scikit-learn,robbymeals/scikit-learn,pnedunuri/scikit-learn,r-mart/scikit-learn,arahuja/scikit-learn,maheshakya/scikit-learn,henridwyer/scikit-learn,Barmaley-exe/scikit-learn,IssamLaradji/scikit-learn,IshankGulati/scikit-learn,Sentient07/scikit-learn,rohanp/scikit-learn,theoryno3/scikit-learn,luo66/scikit-learn,AlexandreAbraham/scikit-learn,jayflo/scikit-learn,treycausey/scikit-learn,0x0all/scikit-learn,Achuth17/scikit-learn,siutanwong/scikit-learn,zorroblue/scikit-learn,LiaoPan/scikit-learn,depet/scikit-learn,yask123/scikit-learn,B3AU/waveTree,rrohan/scikit-learn,ankurankan/scikit-learn,JsNoNo/scikit-learn,NunoEdgarGub1/scikit-learn,alvarofierroclavero/scikit-learn,mugizico/scikit-learn,rohanp/scikit-learn,bnaul/scikit-lea
rn,pypot/scikit-learn,kashif/scikit-learn,Adai0808/scikit-learn,cauchycui/scikit-learn,fredhusser/scikit-learn,kmike/scikit-learn,ningchi/scikit-learn,shusenl/scikit-learn,tawsifkhan/scikit-learn,mrshu/scikit-learn,ahoyosid/scikit-learn,Obus/scikit-learn,macks22/scikit-learn,ogrisel/scikit-learn,cwu2011/scikit-learn,UNR-AERIAL/scikit-learn,tosolveit/scikit-learn,mhue/scikit-learn,cainiaocome/scikit-learn,sgenoud/scikit-learn,MartinDelzant/scikit-learn,MartinSavc/scikit-learn,dsullivan7/scikit-learn,davidgbe/scikit-learn,DSLituiev/scikit-learn,loli/sklearn-ensembletrees,nmayorov/scikit-learn,fzalkow/scikit-learn,nvoron23/scikit-learn,nikitasingh981/scikit-learn,russel1237/scikit-learn,eg-zhang/scikit-learn,MatthieuBizien/scikit-learn,pompiduskus/scikit-learn,mehdidc/scikit-learn,IndraVikas/scikit-learn,ltiao/scikit-learn,clemkoa/scikit-learn,chrisburr/scikit-learn,mlyundin/scikit-learn,davidgbe/scikit-learn,etkirsch/scikit-learn,loli/semisupervisedforests,walterreade/scikit-learn,larsmans/scikit-learn,kashif/scikit-learn,RPGOne/scikit-learn,loli/semisupervisedforests,bthirion/scikit-learn,depet/scikit-learn,rahuldhote/scikit-learn,gotomypc/scikit-learn,466152112/scikit-learn,vermouthmjl/scikit-learn,pv/scikit-learn,yask123/scikit-learn,Adai0808/scikit-learn,mhdella/scikit-learn,Lawrence-Liu/scikit-learn,abhishekgahlot/scikit-learn,pkruskal/scikit-learn,arjoly/scikit-learn,henridwyer/scikit-learn,bhargav/scikit-learn,florian-f/sklearn,ishanic/scikit-learn,lazywei/scikit-learn,elkingtonmcb/scikit-learn,kylerbrown/scikit-learn,elkingtonmcb/scikit-learn,yyjiang/scikit-learn,bhargav/scikit-learn,kjung/scikit-learn,eg-zhang/scikit-learn,RPGOne/scikit-learn,betatim/scikit-learn,MechCoder/scikit-learn,ankurankan/scikit-learn,ChanderG/scikit-learn,zaxtax/scikit-learn,appapantula/scikit-learn,LohithBlaze/scikit-learn,florian-f/sklearn,ogrisel/scikit-learn,rahuldhote/scikit-learn,manashmndl/scikit-learn,pianomania/scikit-learn,mfjb/scikit-learn,schets/scikit-learn,zihua/scikit-
learn,yyjiang/scikit-learn,PatrickOReilly/scikit-learn,scikit-learn/scikit-learn,Akshay0724/scikit-learn,bigdataelephants/scikit-learn,RayMick/scikit-learn,mxjl620/scikit-learn,shangwuhencc/scikit-learn,fbagirov/scikit-learn,icdishb/scikit-learn,CforED/Machine-Learning,mfjb/scikit-learn,HolgerPeters/scikit-learn,ephes/scikit-learn,PatrickOReilly/scikit-learn,Lawrence-Liu/scikit-learn,robin-lai/scikit-learn,jpautom/scikit-learn,aewhatley/scikit-learn,jm-begon/scikit-learn,MatthieuBizien/scikit-learn,Fireblend/scikit-learn,ZenDevelopmentSystems/scikit-learn,Srisai85/scikit-learn,cl4rke/scikit-learn,jorge2703/scikit-learn,hitszxp/scikit-learn,potash/scikit-learn,jblackburne/scikit-learn,andaag/scikit-learn,liangz0707/scikit-learn,phdowling/scikit-learn,mhdella/scikit-learn,rahul-c1/scikit-learn,h2educ/scikit-learn,ishanic/scikit-learn,rsivapr/scikit-learn,JsNoNo/scikit-learn,anurag313/scikit-learn,waterponey/scikit-learn,shenzebang/scikit-learn,heli522/scikit-learn,gclenaghan/scikit-learn,potash/scikit-learn,PatrickOReilly/scikit-learn,ElDeveloper/scikit-learn,kaichogami/scikit-learn,abimannans/scikit-learn,pnedunuri/scikit-learn,kashif/scikit-learn,ndingwall/scikit-learn,ishanic/scikit-learn,mattilyra/scikit-learn,AlexRobson/scikit-learn,treycausey/scikit-learn,AlexRobson/scikit-learn,bigdataelephants/scikit-learn,siutanwong/scikit-learn,evgchz/scikit-learn,alvarofierroclavero/scikit-learn,henrykironde/scikit-learn,anntzer/scikit-learn,imaculate/scikit-learn,petosegan/scikit-learn,eickenberg/scikit-learn,nhejazi/scikit-learn,fyffyt/scikit-learn,zhenv5/scikit-learn,giorgiop/scikit-learn,Garrett-R/scikit-learn,michigraber/scikit-learn,ClimbsRocks/scikit-learn,mehdidc/scikit-learn,terkkila/scikit-learn,zorojean/scikit-learn,bnaul/scikit-learn,kaichogami/scikit-learn,Aasmi/scikit-learn,eickenberg/scikit-learn,alexeyum/scikit-learn,themrmax/scikit-learn,ahoyosid/scikit-learn,JPFrancoia/scikit-learn,lbishal/scikit-learn,yyjiang/scikit-learn,qifeigit/scikit-learn,stylianos-k
ampakis/scikit-learn,robbymeals/scikit-learn,r-mart/scikit-learn,shikhardb/scikit-learn,0x0all/scikit-learn,fengzhyuan/scikit-learn,gclenaghan/scikit-learn,fyffyt/scikit-learn,wanggang3333/scikit-learn,Titan-C/scikit-learn,alexeyum/scikit-learn,sanketloke/scikit-learn,amueller/scikit-learn,vigilv/scikit-learn,RayMick/scikit-learn,siutanwong/scikit-learn,Titan-C/scikit-learn,gclenaghan/scikit-learn,jereze/scikit-learn,glouppe/scikit-learn,vibhorag/scikit-learn,hlin117/scikit-learn,trungnt13/scikit-learn,mikebenfield/scikit-learn,shikhardb/scikit-learn,Obus/scikit-learn,CVML/scikit-learn,mfjb/scikit-learn,jereze/scikit-learn,jjx02230808/project0223,chrsrds/scikit-learn,akionakamura/scikit-learn,hugobowne/scikit-learn,hitszxp/scikit-learn,adamgreenhall/scikit-learn,glemaitre/scikit-learn,kjung/scikit-learn,lbishal/scikit-learn,zorroblue/scikit-learn,Clyde-fare/scikit-learn,vermouthmjl/scikit-learn,Adai0808/scikit-learn,Akshay0724/scikit-learn,herilalaina/scikit-learn,anntzer/scikit-learn,shyamalschandra/scikit-learn,liangz0707/scikit-learn,anirudhjayaraman/scikit-learn,PatrickChrist/scikit-learn,vibhorag/scikit-learn,fyffyt/scikit-learn,MatthieuBizien/scikit-learn,AnasGhrab/scikit-learn,lesteve/scikit-learn,espg/scikit-learn,vortex-ape/scikit-learn,JosmanPS/scikit-learn,victorbergelin/scikit-learn,procoder317/scikit-learn,zihua/scikit-learn,djgagne/scikit-learn,rvraghav93/scikit-learn,MartinSavc/scikit-learn,xzh86/scikit-learn,sonnyhu/scikit-learn,xwolf12/scikit-learn,iismd17/scikit-learn,shikhardb/scikit-learn,theoryno3/scikit-learn,vivekmishra1991/scikit-learn,q1ang/scikit-learn,abimannans/scikit-learn,yonglehou/scikit-learn,MechCoder/scikit-learn,ilyes14/scikit-learn,themrmax/scikit-learn,meduz/scikit-learn,scikit-learn/scikit-learn,sergeyf/scikit-learn,chrsrds/scikit-learn,spallavolu/scikit-learn,aminert/scikit-learn,YinongLong/scikit-learn,andrewnc/scikit-learn,mikebenfield/scikit-learn,carrillo/scikit-learn,tawsifkhan/scikit-learn,mblondel/scikit-learn,ChanderG/s
cikit-learn,0asa/scikit-learn,B3AU/waveTree,iismd17/scikit-learn,ZENGXH/scikit-learn,raghavrv/scikit-learn,krez13/scikit-learn,0x0all/scikit-learn,MechCoder/scikit-learn,Barmaley-exe/scikit-learn,xyguo/scikit-learn,sumspr/scikit-learn,IndraVikas/scikit-learn,bnaul/scikit-learn,anirudhjayaraman/scikit-learn,hrjn/scikit-learn,lucidfrontier45/scikit-learn,xubenben/scikit-learn,aabadie/scikit-learn,fzalkow/scikit-learn,ephes/scikit-learn,ashhher3/scikit-learn,hdmetor/scikit-learn,appapantula/scikit-learn,PrashntS/scikit-learn,jlegendary/scikit-learn,nomadcube/scikit-learn,IssamLaradji/scikit-learn,sgenoud/scikit-learn,hugobowne/scikit-learn,jakobworldpeace/scikit-learn,xyguo/scikit-learn,mwv/scikit-learn,jakobworldpeace/scikit-learn,xavierwu/scikit-learn,tawsifkhan/scikit-learn,ChanderG/scikit-learn,lin-credible/scikit-learn,jzt5132/scikit-learn,samzhang111/scikit-learn,ky822/scikit-learn,khkaminska/scikit-learn,henrykironde/scikit-learn,mjudsp/Tsallis,jakirkham/scikit-learn,tosolveit/scikit-learn,costypetrisor/scikit-learn,hainm/scikit-learn,tmhm/scikit-learn,ldirer/scikit-learn,nrhine1/scikit-learn,petosegan/scikit-learn,untom/scikit-learn,sanketloke/scikit-learn,abhishekgahlot/scikit-learn,aminert/scikit-learn,mayblue9/scikit-learn,iismd17/scikit-learn,NunoEdgarGub1/scikit-learn,rahul-c1/scikit-learn,RayMick/scikit-learn,arjoly/scikit-learn,sarahgrogan/scikit-learn,AIML/scikit-learn,PatrickOReilly/scikit-learn,sergeyf/scikit-learn,krez13/scikit-learn,xyguo/scikit-learn,arahuja/scikit-learn,f3r/scikit-learn,fbagirov/scikit-learn,IshankGulati/scikit-learn,tmhm/scikit-learn,zhenv5/scikit-learn,ilyes14/scikit-learn,fengzhyuan/scikit-learn,olologin/scikit-learn,nvoron23/scikit-learn,wzbozon/scikit-learn,hitszxp/scikit-learn,jereze/scikit-learn,madjelan/scikit-learn,heli522/scikit-learn,khkaminska/scikit-learn,etkirsch/scikit-learn,gotomypc/scikit-learn,MartinSavc/scikit-learn,abhishekgahlot/scikit-learn,kagayakidan/scikit-learn,lesteve/scikit-learn,lesteve/scikit-learn,al
tairpearl/scikit-learn,mayblue9/scikit-learn,MohammedWasim/scikit-learn,giorgiop/scikit-learn,ivannz/scikit-learn,deepesch/scikit-learn,bigdataelephants/scikit-learn,liberatorqjw/scikit-learn,cl4rke/scikit-learn,loli/sklearn-ensembletrees,jzt5132/scikit-learn,RachitKansal/scikit-learn,lazywei/scikit-learn,PrashntS/scikit-learn,billy-inn/scikit-learn,jaidevd/scikit-learn,Aasmi/scikit-learn,wanggang3333/scikit-learn,macks22/scikit-learn,vermouthmjl/scikit-learn,kmike/scikit-learn,f3r/scikit-learn,chrsrds/scikit-learn,ZENGXH/scikit-learn,macks22/scikit-learn,aetilley/scikit-learn,ngoix/OCRF,mjudsp/Tsallis,Myasuka/scikit-learn,trankmichael/scikit-learn,ElDeveloper/scikit-learn,luo66/scikit-learn,wanggang3333/scikit-learn,frank-tancf/scikit-learn,trungnt13/scikit-learn,cybernet14/scikit-learn,dsquareindia/scikit-learn,alexsavio/scikit-learn,wzbozon/scikit-learn,toastedcornflakes/scikit-learn,cybernet14/scikit-learn,beepee14/scikit-learn,lazywei/scikit-learn,adamgreenhall/scikit-learn,liberatorqjw/scikit-learn,wazeerzulfikar/scikit-learn,DonBeo/scikit-learn,jm-begon/scikit-learn,MohammedWasim/scikit-learn,glennq/scikit-learn,harshaneelhg/scikit-learn,LiaoPan/scikit-learn,equialgo/scikit-learn,djgagne/scikit-learn,jjx02230808/project0223,r-mart/scikit-learn,bthirion/scikit-learn,Barmaley-exe/scikit-learn,tdhopper/scikit-learn,pythonvietnam/scikit-learn,meduz/scikit-learn,olologin/scikit-learn,LohithBlaze/scikit-learn,ankurankan/scikit-learn,altairpearl/scikit-learn,hrjn/scikit-learn,evgchz/scikit-learn,0asa/scikit-learn,xavierwu/scikit-learn,shahankhatch/scikit-learn,JosmanPS/scikit-learn,nhejazi/scikit-learn,etkirsch/scikit-learn,abhishekgahlot/scikit-learn,alexsavio/scikit-learn,aewhatley/scikit-learn,anurag313/scikit-learn,lenovor/scikit-learn,loli/sklearn-ensembletrees,tdhopper/scikit-learn,OshynSong/scikit-learn,ltiao/scikit-learn,pkruskal/scikit-learn,quheng/scikit-learn,procoder317/scikit-learn,btabibian/scikit-learn,rajat1994/scikit-learn,kylerbrown/scikit-learn,hu
gobowne/scikit-learn,mugizico/scikit-learn,DSLituiev/scikit-learn,nhejazi/scikit-learn,dsquareindia/scikit-learn,PrashntS/scikit-learn,ChanChiChoi/scikit-learn,ephes/scikit-learn,alexsavio/scikit-learn,scikit-learn/scikit-learn,saiwing-yeung/scikit-learn,pianomania/scikit-learn,thientu/scikit-learn,vortex-ape/scikit-learn,hlin117/scikit-learn,q1ang/scikit-learn,tdhopper/scikit-learn,aewhatley/scikit-learn,kjung/scikit-learn,hrjn/scikit-learn,mjudsp/Tsallis,jmetzen/scikit-learn,zorojean/scikit-learn,abimannans/scikit-learn,fzalkow/scikit-learn,PrashntS/scikit-learn,ivannz/scikit-learn,hsuantien/scikit-learn,stylianos-kampakis/scikit-learn,equialgo/scikit-learn,thilbern/scikit-learn,joernhees/scikit-learn,B3AU/waveTree,ZENGXH/scikit-learn,r-mart/scikit-learn,mblondel/scikit-learn,harshaneelhg/scikit-learn,pkruskal/scikit-learn,IshankGulati/scikit-learn,djgagne/scikit-learn,liangz0707/scikit-learn,eickenberg/scikit-learn,jaidevd/scikit-learn,pratapvardhan/scikit-learn,nomadcube/scikit-learn,alexeyum/scikit-learn,devanshdalal/scikit-learn,pv/scikit-learn,nikitasingh981/scikit-learn,fengzhyuan/scikit-learn,sumspr/scikit-learn,kylerbrown/scikit-learn,hainm/scikit-learn,zaxtax/scikit-learn,sonnyhu/scikit-learn,Lawrence-Liu/scikit-learn,vinayak-mehta/scikit-learn,saiwing-yeung/scikit-learn,maheshakya/scikit-learn,zhenv5/scikit-learn,etkirsch/scikit-learn,AlexandreAbraham/scikit-learn,UNR-AERIAL/scikit-learn,larsmans/scikit-learn,billy-inn/scikit-learn,aewhatley/scikit-learn,akionakamura/scikit-learn,Aasmi/scikit-learn,lin-credible/scikit-learn,mikebenfield/scikit-learn,rexshihaoren/scikit-learn,rishikksh20/scikit-learn,gotomypc/scikit-learn,rahuldhote/scikit-learn,marcocaccin/scikit-learn,frank-tancf/scikit-learn,nikitasingh981/scikit-learn,clemkoa/scikit-learn,vivekmishra1991/scikit-learn,ndingwall/scikit-learn,stylianos-kampakis/scikit-learn,glouppe/scikit-learn,theoryno3/scikit-learn,ngoix/OCRF,RayMick/scikit-learn,plissonf/scikit-learn,RPGOne/scikit-learn,ankurankan/sci
kit-learn,ZenDevelopmentSystems/scikit-learn,dhruv13J/scikit-learn,herilalaina/scikit-learn,lucidfrontier45/scikit-learn,zihua/scikit-learn,xiaoxiamii/scikit-learn,Clyde-fare/scikit-learn,MechCoder/scikit-learn,mblondel/scikit-learn,bnaul/scikit-learn,CforED/Machine-Learning,zuku1985/scikit-learn,abimannans/scikit-learn,roxyboy/scikit-learn,rajat1994/scikit-learn,wazeerzulfikar/scikit-learn,Djabbz/scikit-learn,roxyboy/scikit-learn,xwolf12/scikit-learn,dhruv13J/scikit-learn,CVML/scikit-learn,jseabold/scikit-learn,trankmichael/scikit-learn,wazeerzulfikar/scikit-learn,shusenl/scikit-learn,yanlend/scikit-learn,rsivapr/scikit-learn,robin-lai/scikit-learn,ClimbsRocks/scikit-learn,jkarnows/scikit-learn,fabianp/scikit-learn,nvoron23/scikit-learn,spallavolu/scikit-learn,0asa/scikit-learn,vshtanko/scikit-learn,samzhang111/scikit-learn,pratapvardhan/scikit-learn,jblackburne/scikit-learn,frank-tancf/scikit-learn,abhishekkrthakur/scikit-learn,jm-begon/scikit-learn,vibhorag/scikit-learn,robbymeals/scikit-learn,cdegroc/scikit-learn,betatim/scikit-learn,Garrett-R/scikit-learn,icdishb/scikit-learn,LohithBlaze/scikit-learn,michigraber/scikit-learn,vshtanko/scikit-learn,sumspr/scikit-learn,nelson-liu/scikit-learn,justincassidy/scikit-learn,mrshu/scikit-learn,pianomania/scikit-learn,aetilley/scikit-learn,massmutual/scikit-learn,dingocuster/scikit-learn,eg-zhang/scikit-learn,pkruskal/scikit-learn,imaculate/scikit-learn,themrmax/scikit-learn,poryfly/scikit-learn,jblackburne/scikit-learn,depet/scikit-learn,JsNoNo/scikit-learn,mrshu/scikit-learn,rishikksh20/scikit-learn,NelisVerhoef/scikit-learn,Vimos/scikit-learn,JeanKossaifi/scikit-learn,Vimos/scikit-learn,wlamond/scikit-learn,jmetzen/scikit-learn,procoder317/scikit-learn,carrillo/scikit-learn,petosegan/scikit-learn,466152112/scikit-learn,AnasGhrab/scikit-learn,DonBeo/scikit-learn,ky822/scikit-learn,huobaowangxi/scikit-learn,voxlol/scikit-learn,vigilv/scikit-learn,rexshihaoren/scikit-learn,ndingwall/scikit-learn,poryfly/scikit-learn,shah
ankhatch/scikit-learn,lenovor/scikit-learn,eg-zhang/scikit-learn,rexshihaoren/scikit-learn,dsullivan7/scikit-learn,dsquareindia/scikit-learn,bigdataelephants/scikit-learn,loli/sklearn-ensembletrees,mojoboss/scikit-learn,terkkila/scikit-learn,glouppe/scikit-learn,jmschrei/scikit-learn,f3r/scikit-learn,theoryno3/scikit-learn,mattgiguere/scikit-learn,ssaeger/scikit-learn,rrohan/scikit-learn,fabioticconi/scikit-learn,ssaeger/scikit-learn,yask123/scikit-learn,thientu/scikit-learn,shusenl/scikit-learn,mjudsp/Tsallis,466152112/scikit-learn,voxlol/scikit-learn,AnasGhrab/scikit-learn,quheng/scikit-learn,xavierwu/scikit-learn,CforED/Machine-Learning,RachitKansal/scikit-learn,Djabbz/scikit-learn,wanggang3333/scikit-learn,xiaoxiamii/scikit-learn,amueller/scikit-learn,eickenberg/scikit-learn,kevin-intel/scikit-learn,shenzebang/scikit-learn,victorbergelin/scikit-learn,aflaxman/scikit-learn,jorik041/scikit-learn,q1ang/scikit-learn,toastedcornflakes/scikit-learn,arjoly/scikit-learn,fabioticconi/scikit-learn,quheng/scikit-learn,wzbozon/scikit-learn,quheng/scikit-learn,ogrisel/scikit-learn,equialgo/scikit-learn,davidgbe/scikit-learn,ldirer/scikit-learn,joernhees/scikit-learn,h2educ/scikit-learn,Windy-Ground/scikit-learn,HolgerPeters/scikit-learn,0x0all/scikit-learn,maheshakya/scikit-learn,ltiao/scikit-learn,Nyker510/scikit-learn,vortex-ape/scikit-learn,Windy-Ground/scikit-learn,cdegroc/scikit-learn,toastedcornflakes/scikit-learn,betatim/scikit-learn,gclenaghan/scikit-learn,sinhrks/scikit-learn,jzt5132/scikit-learn,PatrickChrist/scikit-learn,raghavrv/scikit-learn,RomainBrault/scikit-learn,vigilv/scikit-learn,pv/scikit-learn,luo66/scikit-learn,treycausey/scikit-learn,mugizico/scikit-learn,abhishekgahlot/scikit-learn,hsuantien/scikit-learn,wlamond/scikit-learn,arabenjamin/scikit-learn,giorgiop/scikit-learn,rvraghav93/scikit-learn,xuewei4d/scikit-learn,ogrisel/scikit-learn
|
""" Test the cross_val module
"""
import numpy as np
import nose
from .. import cross_val
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
TEST: Add a smoke test for cross_val_score
|
""" Test the cross_val module
"""
import numpy as np
import nose
from ..base import BaseEstimator
from .. import cross_val
class MockClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation
"""
def __init__(self, a=0):
self.a = a
def fit(self, X, Y, **params):
self._set_params(**params)
return self
def predict(self, T):
return T.shape[0]
def score(self, X=None, Y=None):
return 1./(1+np.abs(self.a))
X = np.ones((10, 2))
y = np.arange(10)/2
################################################################################
# Tests
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
score = cross_val.cross_val_score(clf, X, y)
np.testing.assert_array_equal(score, clf.score(X, y))
|
<commit_before>""" Test the cross_val module
"""
import numpy as np
import nose
from .. import cross_val
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
<commit_msg>TEST: Add a smoke test for cross_val_score<commit_after>
|
""" Test the cross_val module
"""
import numpy as np
import nose
from ..base import BaseEstimator
from .. import cross_val
class MockClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation
"""
def __init__(self, a=0):
self.a = a
def fit(self, X, Y, **params):
self._set_params(**params)
return self
def predict(self, T):
return T.shape[0]
def score(self, X=None, Y=None):
return 1./(1+np.abs(self.a))
X = np.ones((10, 2))
y = np.arange(10)/2
################################################################################
# Tests
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
score = cross_val.cross_val_score(clf, X, y)
np.testing.assert_array_equal(score, clf.score(X, y))
|
""" Test the cross_val module
"""
import numpy as np
import nose
from .. import cross_val
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
TEST: Add a smoke test for cross_val_score""" Test the cross_val module
"""
import numpy as np
import nose
from ..base import BaseEstimator
from .. import cross_val
class MockClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation
"""
def __init__(self, a=0):
self.a = a
def fit(self, X, Y, **params):
self._set_params(**params)
return self
def predict(self, T):
return T.shape[0]
def score(self, X=None, Y=None):
return 1./(1+np.abs(self.a))
X = np.ones((10, 2))
y = np.arange(10)/2
################################################################################
# Tests
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
score = cross_val.cross_val_score(clf, X, y)
np.testing.assert_array_equal(score, clf.score(X, y))
|
<commit_before>""" Test the cross_val module
"""
import numpy as np
import nose
from .. import cross_val
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
<commit_msg>TEST: Add a smoke test for cross_val_score<commit_after>""" Test the cross_val module
"""
import numpy as np
import nose
from ..base import BaseEstimator
from .. import cross_val
class MockClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation
"""
def __init__(self, a=0):
self.a = a
def fit(self, X, Y, **params):
self._set_params(**params)
return self
def predict(self, T):
return T.shape[0]
def score(self, X=None, Y=None):
return 1./(1+np.abs(self.a))
X = np.ones((10, 2))
y = np.arange(10)/2
################################################################################
# Tests
def test_kfold():
# Check that errors are raise if there is not enough samples
nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3)
y = [0, 0, 1, 1, 2]
nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
score = cross_val.cross_val_score(clf, X, y)
np.testing.assert_array_equal(score, clf.score(X, y))
|
106833059bc2dad8a284de50e153bf673d2e3b4b
|
premis_event_service/urls.py
|
premis_event_service/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
Support new and old Django urlconf imports
|
Support new and old Django urlconf imports
|
Python
|
bsd-3-clause
|
unt-libraries/django-premis-event-service,unt-libraries/django-premis-event-service,unt-libraries/django-premis-event-service
|
from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
Support new and old Django urlconf imports
|
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
<commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
<commit_msg>Support new and old Django urlconf imports<commit_after>
|
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
Support new and old Django urlconf importstry:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
<commit_before>from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
<commit_msg>Support new and old Django urlconf imports<commit_after>try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
d902045e991cc778dabe31e34a6dcd119e19ccd0
|
attributes/license/main.py
|
attributes/license/main.py
|
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = record[0].rstrip()
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
if 'license' in json_response:
result = 1
else:
result = 0
return result
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
|
from core import tokenize
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = tokenize(record[0].rstrip())
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
result = 'license' in json_response
return result, int(result)
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
|
Update license attribute to return binary and raw result
|
Update license attribute to return binary and raw result
|
Python
|
apache-2.0
|
RepoReapers/reaper,RepoReapers/reaper,RepoReapers/reaper,RepoReapers/reaper
|
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = record[0].rstrip()
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
if 'license' in json_response:
result = 1
else:
result = 0
return result
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
Update license attribute to return binary and raw result
|
from core import tokenize
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = tokenize(record[0].rstrip())
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
result = 'license' in json_response
return result, int(result)
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
|
<commit_before>from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = record[0].rstrip()
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
if 'license' in json_response:
result = 1
else:
result = 0
return result
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
<commit_msg>Update license attribute to return binary and raw result<commit_after>
|
from core import tokenize
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = tokenize(record[0].rstrip())
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
result = 'license' in json_response
return result, int(result)
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
|
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = record[0].rstrip()
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
if 'license' in json_response:
result = 1
else:
result = 0
return result
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
Update license attribute to return binary and raw resultfrom core import tokenize
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = tokenize(record[0].rstrip())
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
result = 'license' in json_response
return result, int(result)
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
|
<commit_before>from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = record[0].rstrip()
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
if 'license' in json_response:
result = 1
else:
result = 0
return result
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
<commit_msg>Update license attribute to return binary and raw result<commit_after>from core import tokenize
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
query = 'SELECT url FROM projects WHERE id = ' + str(project_id)
cursor.execute(query)
record = cursor.fetchone()
full_url = tokenize(record[0].rstrip())
json_response = url_to_json(full_url, headers={
'Accept': 'application/vnd.github.drax-preview+json'
}
)
result = 'license' in json_response
return result, int(result)
if __name__ == '__main__':
print("Attribute plugins are not meant to be executed directly.")
|
61cce2cd23c798a8604274335d9637e8ebce1385
|
api/v2/views/image.py
|
api/v2/views/image.py
|
from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
|
from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username',
'versions__machines__instance_source__identifier',
'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
|
Add 'Machine Identifier' for easy support lookups in Troposphere
|
Add 'Machine Identifier' for easy support lookups in Troposphere
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
Add 'Machine Identifier' for easy support lookups in Troposphere
|
from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username',
'versions__machines__instance_source__identifier',
'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
|
<commit_before>from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
<commit_msg>Add 'Machine Identifier' for easy support lookups in Troposphere<commit_after>
|
from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username',
'versions__machines__instance_source__identifier',
'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
|
from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
Add 'Machine Identifier' for easy support lookups in Tropospherefrom core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username',
'versions__machines__instance_source__identifier',
'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
|
<commit_before>from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
<commit_msg>Add 'Machine Identifier' for easy support lookups in Troposphere<commit_after>from core.models import Application as Image
from api import permissions
from api.v2.serializers.details import ImageSerializer
from api.v2.views.base import AuthOptionalViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet):
"""
API endpoint that allows images to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace']
filter_fields = ('created_by__username', 'tags__name', 'projects__id')
permission_classes = (permissions.InMaintenance,
permissions.ApiAuthOptional,
permissions.CanEditOrReadOnly,
permissions.ApplicationMemberOrReadOnly)
serializer_class = ImageSerializer
search_fields = ('id', 'name', 'versions__change_log', 'tags__name',
'tags__description', 'created_by__username',
'versions__machines__instance_source__identifier',
'versions__machines__instance_source__provider__location')
def get_queryset(self):
request_user = self.request.user
return Image.current_apps(request_user)
|
358f244b397f11cdf9f89304356ac45b4c6621b5
|
__init__.py
|
__init__.py
|
#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def __result(self, output_dir, retcode):
return {
'gcc': os.path.join(output_dir, 'bin'),
'passed': retcode == 0
}
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)})
|
#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
|
Add return value: tcc path.
|
Add return value: tcc path.
|
Python
|
apache-2.0
|
lugovskoy/dts-sample-compile
|
#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def __result(self, output_dir, retcode):
return {
'gcc': os.path.join(output_dir, 'bin'),
'passed': retcode == 0
}
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)})
Add return value: tcc path.
|
#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
|
<commit_before>#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def __result(self, output_dir, retcode):
return {
'gcc': os.path.join(output_dir, 'bin'),
'passed': retcode == 0
}
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)})
<commit_msg>Add return value: tcc path.<commit_after>
|
#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
|
#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def __result(self, output_dir, retcode):
return {
'gcc': os.path.join(output_dir, 'bin'),
'passed': retcode == 0
}
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)})
Add return value: tcc path.#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
|
<commit_before>#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def __result(self, output_dir, retcode):
return {
'gcc': os.path.join(output_dir, 'bin'),
'passed': retcode == 0
}
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)})
<commit_msg>Add return value: tcc path.<commit_after>#! /usr/bin/env python
# coding=utf-8
import os.path
import subprocess
class SubTask():
def __init__(self, output_dir, log):
self.__output_dir = output_dir
self.__log = log
self.__wd = os.path.dirname(os.path.realpath(__file__))
self.__init_done = False
print "__init__"
def is_initialized(self):
print "init", self.__init_done
return self.__init_done
def initialize(self):
print "initialize"
self.__init_done = True
script = os.path.join(self.__wd, 'get_tcc.sh')
retcode = subprocess.call([script, self.__wd])
self.__init_done = retcode == 0
def is_enabled(self):
return True
def run(self, q, args):
print "run"
script = os.path.join(self.__wd, 'conf_and_make.sh')
retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log])
q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
|
61ca14440f39106b6109b96919b520e40170b1f3
|
examples/tour_examples/xkcd_tour.py
|
examples/tour_examples/xkcd_tour.py
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Or here for the previous comic.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click for the license here.", 'a[rel="license"]')
self.add_tour_step("This selects a random comic.", 'a[href*="random"]')
self.add_tour_step("Thanks for taking this tour!")
# self.export_tour() # Use this to export the tour as [my_tour.js]
self.export_tour(filename="xkcd_tour.js") # You can customize the name
self.play_tour()
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Click here for the previous one.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click here for the license.", 'a[rel="license"]')
self.add_tour_step("Click for a random comic.", 'a[href*="/random/"]')
self.add_tour_step("Thanks for taking this tour!")
self.export_tour(filename="xkcd_tour.js") # This exports the tour
self.play_tour() # This plays the tour
|
Update a SeleniumBase tour example
|
Update a SeleniumBase tour example
|
Python
|
mit
|
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Or here for the previous comic.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click for the license here.", 'a[rel="license"]')
self.add_tour_step("This selects a random comic.", 'a[href*="random"]')
self.add_tour_step("Thanks for taking this tour!")
# self.export_tour() # Use this to export the tour as [my_tour.js]
self.export_tour(filename="xkcd_tour.js") # You can customize the name
self.play_tour()
Update a SeleniumBase tour example
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Click here for the previous one.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click here for the license.", 'a[rel="license"]')
self.add_tour_step("Click for a random comic.", 'a[href*="/random/"]')
self.add_tour_step("Thanks for taking this tour!")
self.export_tour(filename="xkcd_tour.js") # This exports the tour
self.play_tour() # This plays the tour
|
<commit_before>from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Or here for the previous comic.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click for the license here.", 'a[rel="license"]')
self.add_tour_step("This selects a random comic.", 'a[href*="random"]')
self.add_tour_step("Thanks for taking this tour!")
# self.export_tour() # Use this to export the tour as [my_tour.js]
self.export_tour(filename="xkcd_tour.js") # You can customize the name
self.play_tour()
<commit_msg>Update a SeleniumBase tour example<commit_after>
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Click here for the previous one.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click here for the license.", 'a[rel="license"]')
self.add_tour_step("Click for a random comic.", 'a[href*="/random/"]')
self.add_tour_step("Thanks for taking this tour!")
self.export_tour(filename="xkcd_tour.js") # This exports the tour
self.play_tour() # This plays the tour
|
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Or here for the previous comic.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click for the license here.", 'a[rel="license"]')
self.add_tour_step("This selects a random comic.", 'a[href*="random"]')
self.add_tour_step("Thanks for taking this tour!")
# self.export_tour() # Use this to export the tour as [my_tour.js]
self.export_tour(filename="xkcd_tour.js") # You can customize the name
self.play_tour()
Update a SeleniumBase tour examplefrom seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Click here for the previous one.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click here for the license.", 'a[rel="license"]')
self.add_tour_step("Click for a random comic.", 'a[href*="/random/"]')
self.add_tour_step("Thanks for taking this tour!")
self.export_tour(filename="xkcd_tour.js") # This exports the tour
self.play_tour() # This plays the tour
|
<commit_before>from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Or here for the previous comic.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click for the license here.", 'a[rel="license"]')
self.add_tour_step("This selects a random comic.", 'a[href*="random"]')
self.add_tour_step("Thanks for taking this tour!")
# self.export_tour() # Use this to export the tour as [my_tour.js]
self.export_tour(filename="xkcd_tour.js") # You can customize the name
self.play_tour()
<commit_msg>Update a SeleniumBase tour example<commit_after>from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_basic(self):
self.open('https://xkcd.com/1117/')
self.assert_element('img[alt="My Sky"]')
self.create_shepherd_tour()
self.add_tour_step("Welcome to XKCD!")
self.add_tour_step("This is the XKCD logo.", "#masthead img")
self.add_tour_step("Here's the daily webcomic.", "#comic img")
self.add_tour_step("This is the title.", "#ctitle", alignment="top")
self.add_tour_step("Click here for the next comic.", 'a[rel="next"]')
self.add_tour_step("Click here for the previous one.", 'a[rel="prev"]')
self.add_tour_step("Learn about the author here.", 'a[rel="author"]')
self.add_tour_step("Click here for the license.", 'a[rel="license"]')
self.add_tour_step("Click for a random comic.", 'a[href*="/random/"]')
self.add_tour_step("Thanks for taking this tour!")
self.export_tour(filename="xkcd_tour.js") # This exports the tour
self.play_tour() # This plays the tour
|
671cd368c9730e7c15005df4e476e86d80bf0b8e
|
array/rotate-image.py
|
array/rotate-image.py
|
# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2):
for j in range(i, n-i-1):
temp = a[i][j]
a[i][j] = a[n-1-j][i]
a[n-1-j][i] = a[n-1-i][n-1-j]
a[n-1-i][n-1-j] = a[j][n-1-i]
a[j][n-1-i] = temp
return a
|
# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2): # loop through all squares one by one
for j in range(i, n-i-1): # loop through inner squares (in groups of 4 in current square)
temp = a[i][j] # store current square
a[i][j] = a[n-1-j][i] # move values from left to top
a[n-1-j][i] = a[n-1-i][n-1-j] # move values from bottom to left
a[n-1-i][n-1-j] = a[j][n-1-i] # move values from right to bottom
a[j][n-1-i] = temp # assign temp to right
return a
# test cases
test_one = [[1,2,3],
[4,5,6],
[7,8,9]]
print rotate_image(test_one)
# prints [[7,4,1],
# [8,5,2],
# [9,6,3]]
test_two = [[1]]
print rotate_image(test_two) # prints [[1]]
test_three = [[10,9,6,3,7],
[6,10,2,9,7],
[7,6,3,8,2],
[8,9,7,9,9],
[6,8,6,8,2]]
print rotate_image(test_three)
# prints [[6,8,7,6,10],
# [8,9,6,10,9],
# [6,7,3,2,6],
# [8,9,8,9,3],
# [2,9,2,7,7]]
|
Add test cases and comments
|
Add test cases and comments
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2):
for j in range(i, n-i-1):
temp = a[i][j]
a[i][j] = a[n-1-j][i]
a[n-1-j][i] = a[n-1-i][n-1-j]
a[n-1-i][n-1-j] = a[j][n-1-i]
a[j][n-1-i] = temp
return a
Add test cases and comments
|
# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2): # loop through all squares one by one
for j in range(i, n-i-1): # loop through inner squares (in groups of 4 in current square)
temp = a[i][j] # store current square
a[i][j] = a[n-1-j][i] # move values from left to top
a[n-1-j][i] = a[n-1-i][n-1-j] # move values from bottom to left
a[n-1-i][n-1-j] = a[j][n-1-i] # move values from right to bottom
a[j][n-1-i] = temp # assign temp to right
return a
# test cases
test_one = [[1,2,3],
[4,5,6],
[7,8,9]]
print rotate_image(test_one)
# prints [[7,4,1],
# [8,5,2],
# [9,6,3]]
test_two = [[1]]
print rotate_image(test_two) # prints [[1]]
test_three = [[10,9,6,3,7],
[6,10,2,9,7],
[7,6,3,8,2],
[8,9,7,9,9],
[6,8,6,8,2]]
print rotate_image(test_three)
# prints [[6,8,7,6,10],
# [8,9,6,10,9],
# [6,7,3,2,6],
# [8,9,8,9,3],
# [2,9,2,7,7]]
|
<commit_before># You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2):
for j in range(i, n-i-1):
temp = a[i][j]
a[i][j] = a[n-1-j][i]
a[n-1-j][i] = a[n-1-i][n-1-j]
a[n-1-i][n-1-j] = a[j][n-1-i]
a[j][n-1-i] = temp
return a
<commit_msg>Add test cases and comments<commit_after>
|
# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2): # loop through all squares one by one
for j in range(i, n-i-1): # loop through inner squares (in groups of 4 in current square)
temp = a[i][j] # store current square
a[i][j] = a[n-1-j][i] # move values from left to top
a[n-1-j][i] = a[n-1-i][n-1-j] # move values from bottom to left
a[n-1-i][n-1-j] = a[j][n-1-i] # move values from right to bottom
a[j][n-1-i] = temp # assign temp to right
return a
# test cases
test_one = [[1,2,3],
[4,5,6],
[7,8,9]]
print rotate_image(test_one)
# prints [[7,4,1],
# [8,5,2],
# [9,6,3]]
test_two = [[1]]
print rotate_image(test_two) # prints [[1]]
test_three = [[10,9,6,3,7],
[6,10,2,9,7],
[7,6,3,8,2],
[8,9,7,9,9],
[6,8,6,8,2]]
print rotate_image(test_three)
# prints [[6,8,7,6,10],
# [8,9,6,10,9],
# [6,7,3,2,6],
# [8,9,8,9,3],
# [2,9,2,7,7]]
|
# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2):
for j in range(i, n-i-1):
temp = a[i][j]
a[i][j] = a[n-1-j][i]
a[n-1-j][i] = a[n-1-i][n-1-j]
a[n-1-i][n-1-j] = a[j][n-1-i]
a[j][n-1-i] = temp
return a
Add test cases and comments# You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2): # loop through all squares one by one
for j in range(i, n-i-1): # loop through inner squares (in groups of 4 in current square)
temp = a[i][j] # store current square
a[i][j] = a[n-1-j][i] # move values from left to top
a[n-1-j][i] = a[n-1-i][n-1-j] # move values from bottom to left
a[n-1-i][n-1-j] = a[j][n-1-i] # move values from right to bottom
a[j][n-1-i] = temp # assign temp to right
return a
# test cases
test_one = [[1,2,3],
[4,5,6],
[7,8,9]]
print rotate_image(test_one)
# prints [[7,4,1],
# [8,5,2],
# [9,6,3]]
test_two = [[1]]
print rotate_image(test_two) # prints [[1]]
test_three = [[10,9,6,3,7],
[6,10,2,9,7],
[7,6,3,8,2],
[8,9,7,9,9],
[6,8,6,8,2]]
print rotate_image(test_three)
# prints [[6,8,7,6,10],
# [8,9,6,10,9],
# [6,7,3,2,6],
# [8,9,8,9,3],
# [2,9,2,7,7]]
|
<commit_before># You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2):
for j in range(i, n-i-1):
temp = a[i][j]
a[i][j] = a[n-1-j][i]
a[n-1-j][i] = a[n-1-i][n-1-j]
a[n-1-i][n-1-j] = a[j][n-1-i]
a[j][n-1-i] = temp
return a
<commit_msg>Add test cases and comments<commit_after># You are given an n x n 2D matrix that represents an image. Rotate the image by 90 degrees clockwise
# solve with O(1) additional memory
def rotate_image(a):
n = len(a)
if a is None or n < 1:
return a
else:
for i in range(n/2): # loop through all squares one by one
for j in range(i, n-i-1): # loop through inner squares (in groups of 4 in current square)
temp = a[i][j] # store current square
a[i][j] = a[n-1-j][i] # move values from left to top
a[n-1-j][i] = a[n-1-i][n-1-j] # move values from bottom to left
a[n-1-i][n-1-j] = a[j][n-1-i] # move values from right to bottom
a[j][n-1-i] = temp # assign temp to right
return a
# test cases
test_one = [[1,2,3],
[4,5,6],
[7,8,9]]
print rotate_image(test_one)
# prints [[7,4,1],
# [8,5,2],
# [9,6,3]]
test_two = [[1]]
print rotate_image(test_two) # prints [[1]]
test_three = [[10,9,6,3,7],
[6,10,2,9,7],
[7,6,3,8,2],
[8,9,7,9,9],
[6,8,6,8,2]]
print rotate_image(test_three)
# prints [[6,8,7,6,10],
# [8,9,6,10,9],
# [6,7,3,2,6],
# [8,9,8,9,3],
# [2,9,2,7,7]]
|
d68f391d15927db65ea4e62d67bd9faf37b5deaf
|
file_process/utils/get_reference.py
|
file_process/utils/get_reference.py
|
from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
refallele = twobit_file[chrom][start:end]
return refallele
|
from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
try:
refallele = twobit_file[chrom][start:end]
except TypeError:
refallele = 'N'
return refallele
|
Return N for ref allele in pos uncovered by ref
|
Return N for ref allele in pos uncovered by ref
This arose during processing of a CGI var file originating from LFR
data (not sure, but LFR is an advanced technique, so it's possible CGI
is calling some positions that aren't called in reference).
|
Python
|
mit
|
PersonalGenomesOrg/archive-genevieve-201505,PersonalGenomesOrg/archive-genevieve-201505,PersonalGenomesOrg/archive-genevieve-201505
|
from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
refallele = twobit_file[chrom][start:end]
return refallele
Return N for ref allele in pos uncovered by ref
This arose during processing of a CGI var file originating from LFR
data (not sure, but LFR is an advanced technique, so it's possible CGI
is calling some positions that aren't called in reference).
|
from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
try:
refallele = twobit_file[chrom][start:end]
except TypeError:
refallele = 'N'
return refallele
|
<commit_before>from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
refallele = twobit_file[chrom][start:end]
return refallele
<commit_msg>Return N for ref allele in pos uncovered by ref
This arose during processing of a CGI var file originating from LFR
data (not sure, but LFR is an advanced technique, so it's possible CGI
is calling some positions that aren't called in reference).<commit_after>
|
from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
try:
refallele = twobit_file[chrom][start:end]
except TypeError:
refallele = 'N'
return refallele
|
from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
refallele = twobit_file[chrom][start:end]
return refallele
Return N for ref allele in pos uncovered by ref
This arose during processing of a CGI var file originating from LFR
data (not sure, but LFR is an advanced technique, so it's possible CGI
is calling some positions that aren't called in reference).from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
try:
refallele = twobit_file[chrom][start:end]
except TypeError:
refallele = 'N'
return refallele
|
<commit_before>from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
refallele = twobit_file[chrom][start:end]
return refallele
<commit_msg>Return N for ref allele in pos uncovered by ref
This arose during processing of a CGI var file originating from LFR
data (not sure, but LFR is an advanced technique, so it's possible CGI
is calling some positions that aren't called in reference).<commit_after>from __future__ import absolute_import
from .twobit import TwoBitFile
def get_reference_allele(chrom, start, hg19_path):
twobit_file = TwoBitFile(hg19_path)
end = start + 1
try:
refallele = twobit_file[chrom][start:end]
except TypeError:
refallele = 'N'
return refallele
|
461522c3b79202c915544466272d3bb2a3d0ecbe
|
api/radar_api/serializers/meta.py
|
api/radar_api/serializers/meta.py
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class CreatedDateMixin(object):
created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
Add created and modified date mixins
|
Add created and modified date mixins
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
Add created and modified date mixins
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class CreatedDateMixin(object):
created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
<commit_before>from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
<commit_msg>Add created and modified date mixins<commit_after>
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class CreatedDateMixin(object):
created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
Add created and modified date mixinsfrom radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class CreatedDateMixin(object):
created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
<commit_before>from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin):
pass
<commit_msg>Add created and modified date mixins<commit_after>from radar.models.users import User
from radar.serializers.fields import StringField, IntegerField, DateTimeField
from radar.serializers.models import ModelSerializer
class TinyUserSerializer(ModelSerializer):
id = IntegerField()
username = StringField()
email = StringField()
first_name = StringField()
last_name = StringField()
class Meta:
model_class = User
fields = (
'id',
'username',
'email',
'first_name',
'last_name'
)
class CreatedUserMixin(object):
created_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(CreatedUserMixin, self).get_model_exclude()
model_exclude.add('created_user_id')
return model_exclude
class ModifiedUserMixin(object):
modified_user = TinyUserSerializer(read_only=True)
def get_model_exclude(self):
model_exclude = super(ModifiedUserMixin, self).get_model_exclude()
model_exclude.add('modified_user_id')
return model_exclude
class CreatedDateMixin(object):
created_date = DateTimeField(read_only=False)
class ModifiedDateMixin(object):
modified_date = DateTimeField(read_only=False)
class MetaSerializerMixin(CreatedUserMixin, ModifiedUserMixin, CreatedDateMixin, ModifiedDateMixin):
pass
|
71d6176b1468a5bf9aef1ced5214c32b69efaf50
|
apps/countdown/views.py
|
apps/countdown/views.py
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour) - 1 #JS also takes hours in retarded format...
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour)
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))
|
Fix bug in countdonw hours calculation
|
Fix bug in countdonw hours calculation
|
Python
|
bsd-3-clause
|
Teknologforeningen/tf-info,Teknologforeningen/tf-info,Teknologforeningen/tf-info
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour) - 1 #JS also takes hours in retarded format...
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))Fix bug in countdonw hours calculation
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour)
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour) - 1 #JS also takes hours in retarded format...
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))<commit_msg>Fix bug in countdonw hours calculation<commit_after>
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour)
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour) - 1 #JS also takes hours in retarded format...
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))Fix bug in countdonw hours calculationfrom django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour)
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))
|
<commit_before>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour) - 1 #JS also takes hours in retarded format...
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))<commit_msg>Fix bug in countdonw hours calculation<commit_after>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.utils.http import urlencode
import json
import urllib2
def index(request, year, month, day, hour, text):
params = {
'year': year,
'month': int(month) - 1, #JS takes months in retarded format...
'day': day,
'text': text,
'hour': int(hour)
}
return render_to_response('countdown/index.html', params, context_instance=RequestContext(request))
|
bde0363b51bfa7bb6facac1185c9a687ff952e36
|
artifacts/exceptions.py
|
artifacts/exceptions.py
|
# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
Add __all__ variable to enforce ordering in docs
|
Add __all__ variable to enforce ordering in docs
|
Python
|
mit
|
smarter-travel-media/stac
|
# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
Add __all__ variable to enforce ordering in docs
|
# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
<commit_before># -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
<commit_msg>Add __all__ variable to enforce ordering in docs<commit_after>
|
# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
Add __all__ variable to enforce ordering in docs# -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
<commit_before># -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
class ArtifactsError(RuntimeError):
"""Base for all exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Root for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
<commit_msg>Add __all__ variable to enforce ordering in docs<commit_after># -*- coding: utf-8 -*-
#
# Artifacts - Artifactory Search Client
#
# Copyright 2015 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
"""
artifacts.exceptions
~~~~~~~~~~~~~~~~~~~~
Exceptions raised by the Artifacts library.
"""
from __future__ import print_function, division
__all__ = [
'ArtifactsError',
'ArtifactoryApiError',
'NoReleaseArtifactsError',
'NoArtifactVersionsError'
]
class ArtifactsError(RuntimeError):
"""Base for exceptions raised by the Artifacts library"""
class ArtifactoryApiError(ArtifactsError):
"""Base for errors interacting with the Artifactory REST API"""
def __init__(self, *args, **kwargs):
#: HTTP status code returned by the Artifactory REST API
self.code = kwargs.pop('code', None)
#: URL used for making a request to the Artifactory REST API
self.url = kwargs.pop('url', None)
super(ArtifactoryApiError, self).__init__(*args, **kwargs)
class NoReleaseArtifactsError(ArtifactoryApiError):
"""There were no release artifacts for the project in the given repository"""
class NoArtifactVersionsError(ArtifactoryApiError):
"""There were no versions for the project in the given repository"""
|
784fb8591cd1a66de1adac9626d8c4fb02d8e01e
|
examples/customization/pwd-cd-and-system/utils.py
|
examples/customization/pwd-cd-and-system/utils.py
|
"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
def chdir(debugger, args, result, dict):
"""Change the working directory, or cd to ${HOME}."""
dir = args.strip()
if dir:
os.chdir(args)
else:
os.chdir(os.path.expanduser('~'))
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
|
"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
# Store the previous working directory for the 'cd -' command.
class Holder:
"""Holds the _prev_dir_ class attribute for chdir() function."""
_prev_dir_ = None
@classmethod
def prev_dir(cls):
return cls._prev_dir_
@classmethod
def swap(cls, dir):
cls._prev_dir_ = dir
def chdir(debugger, args, result, dict):
"""
Change the working directory, or cd to ${HOME}.
You can also issue 'cd -' to change to the previous working directory.
"""
new_dir = args.strip()
if not new_dir:
new_dir = os.path.expanduser('~')
elif new_dir == '-':
if not Holder.prev_dir():
# Bad directory, not changing.
print "bad directory, not changing"
return
else:
new_dir = Holder.prev_dir()
Holder.swap(os.getcwd())
os.chdir(new_dir)
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
|
Add 'cd -' feature to change to the previous working directory.
|
Add 'cd -' feature to change to the previous working directory.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141846 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb
|
"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
def chdir(debugger, args, result, dict):
"""Change the working directory, or cd to ${HOME}."""
dir = args.strip()
if dir:
os.chdir(args)
else:
os.chdir(os.path.expanduser('~'))
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
Add 'cd -' feature to change to the previous working directory.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141846 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
# Store the previous working directory for the 'cd -' command.
class Holder:
"""Holds the _prev_dir_ class attribute for chdir() function."""
_prev_dir_ = None
@classmethod
def prev_dir(cls):
return cls._prev_dir_
@classmethod
def swap(cls, dir):
cls._prev_dir_ = dir
def chdir(debugger, args, result, dict):
"""
Change the working directory, or cd to ${HOME}.
You can also issue 'cd -' to change to the previous working directory.
"""
new_dir = args.strip()
if not new_dir:
new_dir = os.path.expanduser('~')
elif new_dir == '-':
if not Holder.prev_dir():
# Bad directory, not changing.
print "bad directory, not changing"
return
else:
new_dir = Holder.prev_dir()
Holder.swap(os.getcwd())
os.chdir(new_dir)
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
|
<commit_before>"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
def chdir(debugger, args, result, dict):
"""Change the working directory, or cd to ${HOME}."""
dir = args.strip()
if dir:
os.chdir(args)
else:
os.chdir(os.path.expanduser('~'))
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
<commit_msg>Add 'cd -' feature to change to the previous working directory.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141846 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
# Store the previous working directory for the 'cd -' command.
class Holder:
"""Holds the _prev_dir_ class attribute for chdir() function."""
_prev_dir_ = None
@classmethod
def prev_dir(cls):
return cls._prev_dir_
@classmethod
def swap(cls, dir):
cls._prev_dir_ = dir
def chdir(debugger, args, result, dict):
"""
Change the working directory, or cd to ${HOME}.
You can also issue 'cd -' to change to the previous working directory.
"""
new_dir = args.strip()
if not new_dir:
new_dir = os.path.expanduser('~')
elif new_dir == '-':
if not Holder.prev_dir():
# Bad directory, not changing.
print "bad directory, not changing"
return
else:
new_dir = Holder.prev_dir()
Holder.swap(os.getcwd())
os.chdir(new_dir)
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
|
"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
def chdir(debugger, args, result, dict):
"""Change the working directory, or cd to ${HOME}."""
dir = args.strip()
if dir:
os.chdir(args)
else:
os.chdir(os.path.expanduser('~'))
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
Add 'cd -' feature to change to the previous working directory.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141846 91177308-0d34-0410-b5e6-96231b3b80d8"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
# Store the previous working directory for the 'cd -' command.
class Holder:
"""Holds the _prev_dir_ class attribute for chdir() function."""
_prev_dir_ = None
@classmethod
def prev_dir(cls):
return cls._prev_dir_
@classmethod
def swap(cls, dir):
cls._prev_dir_ = dir
def chdir(debugger, args, result, dict):
"""
Change the working directory, or cd to ${HOME}.
You can also issue 'cd -' to change to the previous working directory.
"""
new_dir = args.strip()
if not new_dir:
new_dir = os.path.expanduser('~')
elif new_dir == '-':
if not Holder.prev_dir():
# Bad directory, not changing.
print "bad directory, not changing"
return
else:
new_dir = Holder.prev_dir()
Holder.swap(os.getcwd())
os.chdir(new_dir)
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
|
<commit_before>"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
def chdir(debugger, args, result, dict):
"""Change the working directory, or cd to ${HOME}."""
dir = args.strip()
if dir:
os.chdir(args)
else:
os.chdir(os.path.expanduser('~'))
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
<commit_msg>Add 'cd -' feature to change to the previous working directory.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@141846 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""Utility for changing directories and execution of commands in a subshell."""
import os, shlex, subprocess
# Store the previous working directory for the 'cd -' command.
class Holder:
"""Holds the _prev_dir_ class attribute for chdir() function."""
_prev_dir_ = None
@classmethod
def prev_dir(cls):
return cls._prev_dir_
@classmethod
def swap(cls, dir):
cls._prev_dir_ = dir
def chdir(debugger, args, result, dict):
"""
Change the working directory, or cd to ${HOME}.
You can also issue 'cd -' to change to the previous working directory.
"""
new_dir = args.strip()
if not new_dir:
new_dir = os.path.expanduser('~')
elif new_dir == '-':
if not Holder.prev_dir():
# Bad directory, not changing.
print "bad directory, not changing"
return
else:
new_dir = Holder.prev_dir()
Holder.swap(os.getcwd())
os.chdir(new_dir)
print "Current working directory: %s" % os.getcwd()
def system(debugger, command_line, result, dict):
"""Execute the command (a string) in a subshell."""
args = shlex.split(command_line)
process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
retcode = process.poll()
if output and error:
print "stdout=>\n", output
print "stderr=>\n", error
elif output:
print output
elif error:
print error
print "retcode:", retcode
|
c1ed5eb96b04ca0af2ad8f26023d8cbaa4a75eda
|
rx/concurrency/threadpoolscheduler.py
|
rx/concurrency/threadpoolscheduler.py
|
import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
Make thread pool scheduler behave as a pooled new thread scheduler
|
Make thread pool scheduler behave as a pooled new thread scheduler
|
Python
|
mit
|
ReactiveX/RxPY,ReactiveX/RxPY
|
import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
Make thread pool scheduler behave as a pooled new thread scheduler
|
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
<commit_before>import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
<commit_msg>Make thread pool scheduler behave as a pooled new thread scheduler<commit_after>
|
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
Make thread pool scheduler behave as a pooled new thread schedulerfrom concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
<commit_before>import logging
from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler, Disposable
from rx.disposables import SingleAssignmentDisposable, CompositeDisposable
from .timeoutscheduler import TimeoutScheduler
log = logging.getLogger("Rx")
class ThreadPoolScheduler(TimeoutScheduler):
"""A scheduler that schedules work via the thread pool and threading
timers."""
def __init__(self, max_workers=None):
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def schedule(self, action, state=None):
"""Schedules an action to be executed."""
disposable = SingleAssignmentDisposable()
def run():
disposable.disposable = self.invoke_action(action, state)
future = self.executor.submit(run)
def dispose():
future.cancel()
return CompositeDisposable(disposable, Disposable.create(dispose))
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
<commit_msg>Make thread pool scheduler behave as a pooled new thread scheduler<commit_after>from concurrent.futures import ThreadPoolExecutor
from rx.core import Scheduler
from .newthreadscheduler import NewThreadScheduler
class ThreadPoolScheduler(NewThreadScheduler):
"""A scheduler that schedules work via the thread pool."""
class ThreadPoolThread:
"""Wraps a concurrent future as a thread."""
def __init__(self, executor, run):
self.run = run
self.future = None
self.executor = executor
def start(self):
self.future = self.executor.submit(self.run)
def cancel(self):
self.future.cancel()
def __init__(self, max_workers=None):
super(ThreadPoolScheduler, self).__init__(self.thread_factory)
self.executor = ThreadPoolExecutor(max_workers=max_workers)
def thread_factory(self, target, *args):
return self.ThreadPoolThread(self.executor, target)
Scheduler.thread_pool = thread_pool_scheduler = ThreadPoolScheduler()
|
71b73151c358d2c3d6ceae80d6a2287143085065
|
arxiv_vanity/scraper/arxiv_ids.py
|
arxiv_vanity/scraper/arxiv_ids.py
|
import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
|
import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN, re.I)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
|
Handle capital "V" in URL
|
Handle capital "V" in URL
|
Python
|
apache-2.0
|
arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity,arxiv-vanity/arxiv-vanity
|
import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
Handle capital "V" in URL
|
import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN, re.I)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
|
<commit_before>import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
<commit_msg>Handle capital "V" in URL<commit_after>
|
import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN, re.I)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
|
import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
Handle capital "V" in URLimport re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN, re.I)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
|
<commit_before>import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
<commit_msg>Handle capital "V" in URL<commit_after>import re
ARXIV_ID_PATTERN = r"([a-z\-]+(?:\.[A-Z]{2})?/\d{7}|\d+\.\d+)(v\d+)?"
ARXIV_ID_RE = re.compile(ARXIV_ID_PATTERN, re.I)
ARXIV_URL_RE = re.compile(fr"arxiv.org/[^\/]+/({ARXIV_ID_PATTERN})(\.pdf)?", re.I)
ARXIV_DOI_RE = re.compile(fr"^(?:arxiv:)?({ARXIV_ID_PATTERN})$", re.I)
ARXIV_VANITY_RE = re.compile(
fr"(?:localhost\:\d+|arxiv-vanity\.com)/[^\/]+/({ARXIV_ID_PATTERN})\/?", re.I
)
def remove_version_from_arxiv_id(arxiv_id):
match = ARXIV_ID_RE.match(arxiv_id)
return match.group(1), int(match.group(2)[1:]) if match.group(2) else None
ARXIV_VERSION_RE = re.compile(r"v(\d+)$")
def remove_version_from_arxiv_url(url):
return ARXIV_VERSION_RE.sub("", url)
|
358fcbf44903d817f115d4df1074a89a9f151c9c
|
pythonforandroid/recipes/pymunk/__init__.py
|
pythonforandroid/recipes/pymunk/__init__.py
|
from os.path import join
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = '5.5.0'
url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip'
depends = ['cffi', 'setuptools']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
env['LDFLAGS'] += " -shared -llog"
env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib'))
env['LIBS'] = env.get('LIBS', '') + ' -landroid'
return env
recipe = PymunkRecipe()
|
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = "6.0.0"
url = "https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip"
depends = ["cffi", "setuptools"]
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env["LDFLAGS"] += " -llog"
return env
recipe = PymunkRecipe()
|
Update Pymunk recipe to 6.0.0
|
Update Pymunk recipe to 6.0.0
|
Python
|
mit
|
PKRoma/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kivy/python-for-android,kivy/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android
|
from os.path import join
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = '5.5.0'
url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip'
depends = ['cffi', 'setuptools']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
env['LDFLAGS'] += " -shared -llog"
env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib'))
env['LIBS'] = env.get('LIBS', '') + ' -landroid'
return env
recipe = PymunkRecipe()
Update Pymunk recipe to 6.0.0
|
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = "6.0.0"
url = "https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip"
depends = ["cffi", "setuptools"]
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env["LDFLAGS"] += " -llog"
return env
recipe = PymunkRecipe()
|
<commit_before>from os.path import join
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = '5.5.0'
url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip'
depends = ['cffi', 'setuptools']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
env['LDFLAGS'] += " -shared -llog"
env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib'))
env['LIBS'] = env.get('LIBS', '') + ' -landroid'
return env
recipe = PymunkRecipe()
<commit_msg>Update Pymunk recipe to 6.0.0<commit_after>
|
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = "6.0.0"
url = "https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip"
depends = ["cffi", "setuptools"]
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env["LDFLAGS"] += " -llog"
return env
recipe = PymunkRecipe()
|
from os.path import join
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = '5.5.0'
url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip'
depends = ['cffi', 'setuptools']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
env['LDFLAGS'] += " -shared -llog"
env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib'))
env['LIBS'] = env.get('LIBS', '') + ' -landroid'
return env
recipe = PymunkRecipe()
Update Pymunk recipe to 6.0.0from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = "6.0.0"
url = "https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip"
depends = ["cffi", "setuptools"]
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env["LDFLAGS"] += " -llog"
return env
recipe = PymunkRecipe()
|
<commit_before>from os.path import join
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = '5.5.0'
url = 'https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip'
depends = ['cffi', 'setuptools']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env['PYTHON_ROOT'] = self.ctx.get_python_install_dir()
env['LDFLAGS'] += " -shared -llog"
env['LDFLAGS'] += ' -L{}'.format(join(self.ctx.ndk_platform, 'usr', 'lib'))
env['LIBS'] = env.get('LIBS', '') + ' -landroid'
return env
recipe = PymunkRecipe()
<commit_msg>Update Pymunk recipe to 6.0.0<commit_after>from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PymunkRecipe(CompiledComponentsPythonRecipe):
name = "pymunk"
version = "6.0.0"
url = "https://pypi.python.org/packages/source/p/pymunk/pymunk-{version}.zip"
depends = ["cffi", "setuptools"]
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super().get_recipe_env(arch)
env["LDFLAGS"] += " -llog"
return env
recipe = PymunkRecipe()
|
a9fb5d3899e5f7f9c0b964a2eaa0f74df33dc52f
|
scrapple/utils/exceptions.py
|
scrapple/utils/exceptions.py
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
class InvalidType(ValueError):
"""Exception class for invalid type in arguments."""
pass
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise InvalidType("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
Add custom error class 'InvalidType'
|
Add custom error class 'InvalidType'
|
Python
|
mit
|
AlexMathew/scrapple,scrappleapp/scrapple,AlexMathew/scrapple,AlexMathew/scrapple,scrappleapp/scrapple
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
Add custom error class 'InvalidType'
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
class InvalidType(ValueError):
"""Exception class for invalid type in arguments."""
pass
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise InvalidType("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
<commit_before>"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
<commit_msg>Add custom error class 'InvalidType'<commit_after>
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
class InvalidType(ValueError):
"""Exception class for invalid type in arguments."""
pass
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise InvalidType("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
Add custom error class 'InvalidType'"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
class InvalidType(ValueError):
"""Exception class for invalid type in arguments."""
pass
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise InvalidType("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
<commit_before>"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
<commit_msg>Add custom error class 'InvalidType'<commit_after>"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
class InvalidType(ValueError):
"""Exception class for invalid type in arguments."""
pass
def check_arguments(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise InvalidType("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
0757efde915acdf651231bc345c4c1f3ca67d921
|
work/print-traceback.py
|
work/print-traceback.py
|
#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
obj = json.load(sys.stdin)
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
|
#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
def display(obj):
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
for line in sys.stdin.readlines():
obj = json.loads(line)
display(obj)
|
Allow multiple lines of traceback.
|
Allow multiple lines of traceback.
|
Python
|
mit
|
ammongit/scripts,ammongit/scripts,ammongit/scripts,ammongit/scripts
|
#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
obj = json.load(sys.stdin)
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
Allow multiple lines of traceback.
|
#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
def display(obj):
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
for line in sys.stdin.readlines():
obj = json.loads(line)
display(obj)
|
<commit_before>#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
obj = json.load(sys.stdin)
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
<commit_msg>Allow multiple lines of traceback.<commit_after>
|
#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
def display(obj):
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
for line in sys.stdin.readlines():
obj = json.loads(line)
display(obj)
|
#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
obj = json.load(sys.stdin)
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
Allow multiple lines of traceback.#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
def display(obj):
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
for line in sys.stdin.readlines():
obj = json.loads(line)
display(obj)
|
<commit_before>#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
obj = json.load(sys.stdin)
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
<commit_msg>Allow multiple lines of traceback.<commit_after>#!/usr/bin/python3
from pprint import pprint
import json
import sys
def get(obj, path):
try:
for part in path:
obj = obj[part]
return obj
except KeyError:
return None
def display(obj):
for path in paths:
subobj = get(obj, path)
if subobj is not None:
obj = subobj
break
if isinstance(obj, str):
print(obj)
else:
pprint(obj)
if __name__ == '__main__':
if len(sys.argv) >= 2:
paths = [sys.argv[1].split('.')]
else:
paths = [
['meta', 'error', 'stack'],
['error', 'stack'],
['traceback'],
]
for line in sys.stdin.readlines():
obj = json.loads(line)
display(obj)
|
88e91100cf191b5320ed20678aca835601f7031c
|
doc/ext/cinder_autodoc.py
|
doc/ext/cinder_autodoc.py
|
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
|
Add Apache 2.0 license to source file
|
Add Apache 2.0 license to source file
Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I67df47560d87182265ec4fa973bddaf356829fc1
|
Python
|
apache-2.0
|
eharney/cinder,Datera/cinder,j-griffith/cinder,phenoxim/cinder,phenoxim/cinder,mahak/cinder,openstack/cinder,Datera/cinder,mahak/cinder,openstack/cinder,ge0rgi/cinder,j-griffith/cinder,eharney/cinder
|
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
Add Apache 2.0 license to source file
Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I67df47560d87182265ec4fa973bddaf356829fc1
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
|
<commit_before>from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
<commit_msg>Add Apache 2.0 license to source file
Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I67df47560d87182265ec4fa973bddaf356829fc1<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
|
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
Add Apache 2.0 license to source file
Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I67df47560d87182265ec4fa973bddaf356829fc1# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
|
<commit_before>from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
<commit_msg>Add Apache 2.0 license to source file
Source code should be licensed under the Apache 2.0 license.
All source files should have the licensing header.
Change-Id: I67df47560d87182265ec4fa973bddaf356829fc1<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import gettext
import os
gettext.install('cinder')
from cinder import utils
def setup(app):
print("**Autodocumenting from %s" % os.path.abspath(os.curdir))
rv = utils.execute('./doc/generate_autodoc_index.sh')
print(rv[0])
|
26b79c227ac13bcad686bec6670f585b2d202e33
|
module/plugins/accounts/ReloadCc.py
|
module/plugins/accounts/ReloadCc.py
|
from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.accounts[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)
|
from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.infos[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)
|
Change self.accounts to self.info for getting the cached password hash
|
Change self.accounts to self.info for getting the cached password hash
|
Python
|
agpl-3.0
|
vuolter/pyload,vuolter/pyload,pyblub/pyload,pyblub/pyload,vuolter/pyload
|
from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.accounts[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)Change self.accounts to self.info for getting the cached password hash
|
from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.infos[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)
|
<commit_before>from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.accounts[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)<commit_msg>Change self.accounts to self.info for getting the cached password hash<commit_after>
|
from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.infos[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)
|
from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.accounts[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)Change self.accounts to self.info for getting the cached password hashfrom module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.infos[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)
|
<commit_before>from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.accounts[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)<commit_msg>Change self.accounts to self.info for getting the cached password hash<commit_after>from module.plugins.Account import Account
from module.common.json_layer import json_loads
class ReloadCc(Account):
__name__ = "ReloadCc"
__version__ = "0.1"
__type__ = "account"
__description__ = """Reload.Cc account plugin"""
__author_name__ = ("Reload Team")
__author_mail__ = ("hello@reload.cc")
def loadAccountInfo(self, user, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Parse account info
account_info = {"validuntil": float(status['msg']['expires']),
"pwdhash": status['msg']['hash'],
"trafficleft": -1}
return account_info
def login(self, user, data, req):
# Get user data from reload.cc
status = self.getAccountStatus(user, req)
# Check if user and password are valid
if status['status'] != "ok":
self.wrongPassword()
def getAccountStatus(self, user, req):
pwd = "pwd=%s" % self.accounts[user]['password']
try:
pwd = "hash=%s" % self.infos[user]['pwdhash']
except Exception:
pass
# Use reload.cc API v1 to retrieve account info and return the parsed json answer
answer = req.load("https://api.reload.cc/login?via=pyload&v=1&get_traffic=true&user=%s&%s" % (user, pwd))
return json_loads(answer)
|
b56c1cb1185c8d20276688f29509947cb46a26d4
|
test/test_compiled.py
|
test/test_compiled.py
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
|
Add test with standard lib
|
Add test with standard lib
math.cos( should return <Param: x @0,0>
|
Python
|
mit
|
WoLpH/jedi,WoLpH/jedi,dwillmer/jedi,jonashaag/jedi,mfussenegger/jedi,flurischt/jedi,mfussenegger/jedi,dwillmer/jedi,jonashaag/jedi,tjwei/jedi,tjwei/jedi,flurischt/jedi
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
passAdd test with standard lib
math.cos( should return <Param: x @0,0>
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
|
<commit_before>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass<commit_msg>Add test with standard lib
math.cos( should return <Param: x @0,0><commit_after>
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
|
"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
passAdd test with standard lib
math.cos( should return <Param: x @0,0>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
|
<commit_before>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass<commit_msg>Add test with standard lib
math.cos( should return <Param: x @0,0><commit_after>"""
Test compiled module
"""
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
|
85d0bc9fbb20daeff9aa48a83be1823fa346cb9c
|
tests/test_helpers.py
|
tests/test_helpers.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert items.next()['page'] == 1
assert items.next()['page'] == 2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert next(items)['page'] == 1
assert next(items)['page'] == 2
|
Fix tests for Python 3
|
Fix tests for Python 3
|
Python
|
mit
|
alexandriagroup/rakuten-ws
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert items.next()['page'] == 1
assert items.next()['page'] == 2
Fix tests for Python 3
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert next(items)['page'] == 1
assert next(items)['page'] == 2
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert items.next()['page'] == 1
assert items.next()['page'] == 2
<commit_msg>Fix tests for Python 3<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert next(items)['page'] == 1
assert next(items)['page'] == 2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert items.next()['page'] == 1
assert items.next()['page'] == 2
Fix tests for Python 3# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert next(items)['page'] == 1
assert next(items)['page'] == 2
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert items.next()['page'] == 1
assert items.next()['page'] == 2
<commit_msg>Fix tests for Python 3<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
import types
from rakuten_ws.webservice import RakutenWebService
from rakuten_ws.base import RakutenAPIResponse
@pytest.mark.online
def test_response(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
assert isinstance(response, RakutenAPIResponse)
@pytest.mark.online
def test_single_item(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
item = response['Items'][0]
assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa
@pytest.mark.online
def test_item_pages(credentials):
ws = RakutenWebService(**credentials)
response = ws.ichiba.item.search(keyword="Naruto")
items = response.pages()
# search should also allow to retrieve all the available responses
# within a generator
assert isinstance(items, types.GeneratorType)
# The iteration should switch to the next page
assert next(items)['page'] == 1
assert next(items)['page'] == 2
|
36477e0737897fd717e3cbde4b05cb210d335440
|
tests/test_refresh.py
|
tests/test_refresh.py
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from .helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
|
Use “helpers” as independent module for “tests.runtests” environment
|
Use “helpers” as independent module for “tests.runtests” environment
|
Python
|
mit
|
bjoernricks/python-quilt
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from .helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
Use “helpers” as independent module for “tests.runtests” environment
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
|
<commit_before># vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from .helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
<commit_msg>Use “helpers” as independent module for “tests.runtests” environment<commit_after>
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from .helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
Use “helpers” as independent module for “tests.runtests” environment# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
|
<commit_before># vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from .helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
<commit_msg>Use “helpers” as independent module for “tests.runtests” environment<commit_after># vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# See LICENSE comming with the source of python-quilt for details.
import os
from helpers import make_file
from unittest import TestCase
import quilt.refresh
from quilt.db import Db, Patch
from quilt.utils import TmpDirectory
class Test(TestCase):
def test_refresh(self):
with TmpDirectory() as dir:
old_dir = os.getcwd()
try:
os.chdir(dir.get_name())
db = Db(".pc")
db.create()
backup = os.path.join(".pc", "patch")
os.mkdir(backup)
make_file(b"", backup, "file")
db.add_patch(Patch("patch"))
db.save()
make_file(b"", "patch")
make_file(b"added\n", "file")
cmd = quilt.refresh.Refresh(".", ".pc", ".")
cmd.refresh()
with open("patch", "r") as patch:
self.assertTrue(patch.read(30))
finally:
os.chdir(old_dir)
|
1083201467c2305966dd2c36e9d7b147ced891e2
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/contrib/customlogging.py
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/contrib/customlogging.py
|
from .request import GLOBALS
from django.conf import settings
import logging
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
if hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
|
import logging
from django.conf import settings
from .request import GLOBALS
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
# check for request._cached_user before accessing the user object,
# this means django.contrib.auth.middleware.get_user has returned.
# This avoid hitting a recursive loop when the "django" logger level = "DEBUG".
if hasattr(GLOBALS.request, "_cached_user") and hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
|
Fix recursive logging error wheni GrayFilter is in use and "django" logger is at DEBUG leve
|
Fix recursive logging error wheni GrayFilter is in use and "django" logger is at DEBUG leve
|
Python
|
apache-2.0
|
Benoss/django-cookiecutter,Benoss/django-cookiecutter,Benoss/django-cookiecutter,Benoss/django-cookiecutter
|
from .request import GLOBALS
from django.conf import settings
import logging
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
if hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
Fix recursive logging error wheni GrayFilter is in use and "django" logger is at DEBUG leve
|
import logging
from django.conf import settings
from .request import GLOBALS
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
# check for request._cached_user before accessing the user object,
# this means django.contrib.auth.middleware.get_user has returned.
# This avoid hitting a recursive loop when the "django" logger level = "DEBUG".
if hasattr(GLOBALS.request, "_cached_user") and hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
|
<commit_before>from .request import GLOBALS
from django.conf import settings
import logging
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
if hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
<commit_msg>Fix recursive logging error wheni GrayFilter is in use and "django" logger is at DEBUG leve<commit_after>
|
import logging
from django.conf import settings
from .request import GLOBALS
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
# check for request._cached_user before accessing the user object,
# this means django.contrib.auth.middleware.get_user has returned.
# This avoid hitting a recursive loop when the "django" logger level = "DEBUG".
if hasattr(GLOBALS.request, "_cached_user") and hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
|
from .request import GLOBALS
from django.conf import settings
import logging
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
if hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
Fix recursive logging error wheni GrayFilter is in use and "django" logger is at DEBUG leveimport logging
from django.conf import settings
from .request import GLOBALS
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
# check for request._cached_user before accessing the user object,
# this means django.contrib.auth.middleware.get_user has returned.
# This avoid hitting a recursive loop when the "django" logger level = "DEBUG".
if hasattr(GLOBALS.request, "_cached_user") and hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
|
<commit_before>from .request import GLOBALS
from django.conf import settings
import logging
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
if hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
<commit_msg>Fix recursive logging error wheni GrayFilter is in use and "django" logger is at DEBUG leve<commit_after>import logging
from django.conf import settings
from .request import GLOBALS
class GrayFilter(logging.Filter): # pragma: no cover
def filter(self, record):
# check for request._cached_user before accessing the user object,
# this means django.contrib.auth.middleware.get_user has returned.
# This avoid hitting a recursive loop when the "django" logger level = "DEBUG".
if hasattr(GLOBALS.request, "_cached_user") and hasattr(GLOBALS, "user") and GLOBALS.user:
record.user_id = GLOBALS.user.id
record.env = settings.STATIC_LOGGING.get("env")
return True
|
f18957ca1986317e8987183633c39f1987e316c4
|
pgcontents/__init__.py
|
pgcontents/__init__.py
|
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
|
# Do this first so that we bail early with a useful message if the user didn't
# specify [ipy3] or [ipy4].
try:
import IPython # noqa
except ImportError:
raise ImportError(
"No IPython installation found.\n"
"To install pgcontents with the latest Jupyter Notebook"
" run 'pip install pgcontents[ipy4]b'.\n"
"To install with the legacy IPython Notebook"
" run 'pip install pgcontents[ipy3]'.\n"
)
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
|
Add warning if IPython isn't installed.
|
DOC: Add warning if IPython isn't installed.
|
Python
|
apache-2.0
|
quantopian/pgcontents
|
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
DOC: Add warning if IPython isn't installed.
|
# Do this first so that we bail early with a useful message if the user didn't
# specify [ipy3] or [ipy4].
try:
import IPython # noqa
except ImportError:
raise ImportError(
"No IPython installation found.\n"
"To install pgcontents with the latest Jupyter Notebook"
" run 'pip install pgcontents[ipy4]b'.\n"
"To install with the legacy IPython Notebook"
" run 'pip install pgcontents[ipy3]'.\n"
)
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
|
<commit_before>from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
<commit_msg>DOC: Add warning if IPython isn't installed.<commit_after>
|
# Do this first so that we bail early with a useful message if the user didn't
# specify [ipy3] or [ipy4].
try:
import IPython # noqa
except ImportError:
raise ImportError(
"No IPython installation found.\n"
"To install pgcontents with the latest Jupyter Notebook"
" run 'pip install pgcontents[ipy4]b'.\n"
"To install with the legacy IPython Notebook"
" run 'pip install pgcontents[ipy3]'.\n"
)
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
|
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
DOC: Add warning if IPython isn't installed.# Do this first so that we bail early with a useful message if the user didn't
# specify [ipy3] or [ipy4].
try:
import IPython # noqa
except ImportError:
raise ImportError(
"No IPython installation found.\n"
"To install pgcontents with the latest Jupyter Notebook"
" run 'pip install pgcontents[ipy4]b'.\n"
"To install with the legacy IPython Notebook"
" run 'pip install pgcontents[ipy3]'.\n"
)
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
|
<commit_before>from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
<commit_msg>DOC: Add warning if IPython isn't installed.<commit_after># Do this first so that we bail early with a useful message if the user didn't
# specify [ipy3] or [ipy4].
try:
import IPython # noqa
except ImportError:
raise ImportError(
"No IPython installation found.\n"
"To install pgcontents with the latest Jupyter Notebook"
" run 'pip install pgcontents[ipy4]b'.\n"
"To install with the legacy IPython Notebook"
" run 'pip install pgcontents[ipy3]'.\n"
)
from .checkpoints import PostgresCheckpoints
from .pgmanager import PostgresContentsManager
__all__ = [
'PostgresCheckpoints',
'PostgresContentsManager',
]
|
cc44afdca3ebcdaeed3555f161d3e0a1992c19eb
|
planet/api/__init__.py
|
planet/api/__init__.py
|
# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
Put api.__version__ back in after version shuffle
|
Put api.__version__ back in after version shuffle
|
Python
|
apache-2.0
|
planetlabs/planet-client-python,planetlabs/planet-client-python
|
# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
Put api.__version__ back in after version shuffle
|
# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
<commit_before># Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
<commit_msg>Put api.__version__ back in after version shuffle<commit_after>
|
# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
Put api.__version__ back in after version shuffle# Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
<commit_before># Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
<commit_msg>Put api.__version__ back in after version shuffle<commit_after># Copyright 2017 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .exceptions import (APIException, BadQuery, InvalidAPIKey)
from .exceptions import (NoPermission, MissingResource, OverQuota)
from .exceptions import (ServerError, RequestCancelled, TooManyRequests)
from .client import (ClientV1)
from .utils import write_to_file
from . import filters
from .__version__ import __version__ # NOQA
__all__ = [
ClientV1, APIException, BadQuery, InvalidAPIKey,
NoPermission, MissingResource, OverQuota, ServerError, RequestCancelled,
TooManyRequests,
write_to_file,
filters
]
|
268c4dce6cfa59e10cff7f4bf8456276c2e11f7d
|
main.py
|
main.py
|
#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
|
Create config paths if needed, find files and (for now) report remotes.
|
Create config paths if needed, find files and (for now) report remotes.
|
Python
|
apache-2.0
|
tyrope/KSP-addon-version-checker
|
#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
Create config paths if needed, find files and (for now) report remotes.
|
#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
<commit_msg>Create config paths if needed, find files and (for now) report remotes.<commit_after>
|
#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
|
#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
Create config paths if needed, find files and (for now) report remotes.#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use self file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from config import Config
import os
import versionComparator
def main():
cf = os.path.join(os.path.expanduser('~'), '.KSP-AVC','default.cfg')
cfg = Config(cf)
print cfg.get('install_dir')
#Shutdown procedure
if cfg.need_save():
cfg.save()
sys.exit(0)
if __name__ == '__main__':
main()
<commit_msg>Create config paths if needed, find files and (for now) report remotes.<commit_after>#!/usr/bin/env python2.7
#coding: utf8
"""
KSP Add-on Version Checker.
"""
# Copyright 2014 Dimitri "Tyrope" Molenaars
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import config
import os, sys
import versionComparator as verComp
def main():
# Find config folder.
cfg_dir = os.path.join(os.path.expanduser('~'), '.KSP-AVC')
# Create it if needed.
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
# Create config object.
cfg = config.Config(os.path.join(cfg_dir, 'default.cfg'))
for mod in findMods(cfg):
remote = verComp.getRemote(mod)
print "Found version file %s reporting remote %s" % (mod, remote)
# comp = verComp.versionComparator(mod, remote)
#Shutdown procedure
cfg.save()
sys.exit(0)
def findMods(cfg):
mods = set()
# Walk through the directories
for path, folders, files in os.walk(cfg.get('gamedata_dir')):
# Walk through the files.
for f in files:
# Found a version file.
if f.lower().endswith(".version"):
mods.add(os.path.join(path, f))
return mods
# Startup sequence
if __name__ == '__main__':
main()
|
9fe11538a9d74ff235b530a71d2399fe6c03a88a
|
tests/rules_tests/FromRulesComputeTest.py
|
tests/rules_tests/FromRulesComputeTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromRulesComputeTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions.NotASingleSymbolException import NotASingleSymbolException
from grammpy.exceptions.CantCreateSingleRuleException import CantCreateSingleRuleException
class OneRule(Rule):
rules = [([0], [1])]
class OneRuleTwoRight(Rule):
rules = [([0], [1, 2])]
class TwoRules(Rule):
rules = [([0], [1]), ([2], [3, 4])]
class FromRulesComputeTest(TestCase):
def test_leftRightFromOne(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1])
def test_ruleFromOne(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1])
def test_leftRightSymbolFromOne(self):
self.assertEqual(OneRule.fromSymbol, 0)
self.assertEqual(OneRule.toSymbol, 1)
def test_leftRightFromTwoRight(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1, 2])
def test_ruleFromTwoRight(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1, 2])
def test_leftRightSymbolFromTwoRight(self):
self.assertEqual(OneRule.fromSymbol, 0)
with self.assertRaises(NotASingleSymbolException):
x = OneRule.toSymbol
def test_leftRightFromTwo(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.left
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.right
def test_ruleFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
r = TwoRules.rule
def test_leftRightSymbolFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.fromSymbol
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.toSymbol
if __name__ == '__main__':
main()
|
Add tests to compute from rules property
|
Add tests to compute from rules property
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromRulesComputeTest(TestCase):
pass
if __name__ == '__main__':
main()
Add tests to compute from rules property
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions.NotASingleSymbolException import NotASingleSymbolException
from grammpy.exceptions.CantCreateSingleRuleException import CantCreateSingleRuleException
class OneRule(Rule):
rules = [([0], [1])]
class OneRuleTwoRight(Rule):
rules = [([0], [1, 2])]
class TwoRules(Rule):
rules = [([0], [1]), ([2], [3, 4])]
class FromRulesComputeTest(TestCase):
def test_leftRightFromOne(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1])
def test_ruleFromOne(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1])
def test_leftRightSymbolFromOne(self):
self.assertEqual(OneRule.fromSymbol, 0)
self.assertEqual(OneRule.toSymbol, 1)
def test_leftRightFromTwoRight(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1, 2])
def test_ruleFromTwoRight(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1, 2])
def test_leftRightSymbolFromTwoRight(self):
self.assertEqual(OneRule.fromSymbol, 0)
with self.assertRaises(NotASingleSymbolException):
x = OneRule.toSymbol
def test_leftRightFromTwo(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.left
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.right
def test_ruleFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
r = TwoRules.rule
def test_leftRightSymbolFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.fromSymbol
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.toSymbol
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromRulesComputeTest(TestCase):
pass
if __name__ == '__main__':
main()
<commit_msg>Add tests to compute from rules property<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions.NotASingleSymbolException import NotASingleSymbolException
from grammpy.exceptions.CantCreateSingleRuleException import CantCreateSingleRuleException
class OneRule(Rule):
rules = [([0], [1])]
class OneRuleTwoRight(Rule):
rules = [([0], [1, 2])]
class TwoRules(Rule):
rules = [([0], [1]), ([2], [3, 4])]
class FromRulesComputeTest(TestCase):
def test_leftRightFromOne(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1])
def test_ruleFromOne(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1])
def test_leftRightSymbolFromOne(self):
self.assertEqual(OneRule.fromSymbol, 0)
self.assertEqual(OneRule.toSymbol, 1)
def test_leftRightFromTwoRight(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1, 2])
def test_ruleFromTwoRight(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1, 2])
def test_leftRightSymbolFromTwoRight(self):
self.assertEqual(OneRule.fromSymbol, 0)
with self.assertRaises(NotASingleSymbolException):
x = OneRule.toSymbol
def test_leftRightFromTwo(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.left
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.right
def test_ruleFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
r = TwoRules.rule
def test_leftRightSymbolFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.fromSymbol
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.toSymbol
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromRulesComputeTest(TestCase):
pass
if __name__ == '__main__':
main()
Add tests to compute from rules property#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions.NotASingleSymbolException import NotASingleSymbolException
from grammpy.exceptions.CantCreateSingleRuleException import CantCreateSingleRuleException
class OneRule(Rule):
rules = [([0], [1])]
class OneRuleTwoRight(Rule):
rules = [([0], [1, 2])]
class TwoRules(Rule):
rules = [([0], [1]), ([2], [3, 4])]
class FromRulesComputeTest(TestCase):
def test_leftRightFromOne(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1])
def test_ruleFromOne(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1])
def test_leftRightSymbolFromOne(self):
self.assertEqual(OneRule.fromSymbol, 0)
self.assertEqual(OneRule.toSymbol, 1)
def test_leftRightFromTwoRight(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1, 2])
def test_ruleFromTwoRight(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1, 2])
def test_leftRightSymbolFromTwoRight(self):
self.assertEqual(OneRule.fromSymbol, 0)
with self.assertRaises(NotASingleSymbolException):
x = OneRule.toSymbol
def test_leftRightFromTwo(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.left
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.right
def test_ruleFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
r = TwoRules.rule
def test_leftRightSymbolFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.fromSymbol
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.toSymbol
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromRulesComputeTest(TestCase):
pass
if __name__ == '__main__':
main()
<commit_msg>Add tests to compute from rules property<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from grammpy.exceptions.NotASingleSymbolException import NotASingleSymbolException
from grammpy.exceptions.CantCreateSingleRuleException import CantCreateSingleRuleException
class OneRule(Rule):
rules = [([0], [1])]
class OneRuleTwoRight(Rule):
rules = [([0], [1, 2])]
class TwoRules(Rule):
rules = [([0], [1]), ([2], [3, 4])]
class FromRulesComputeTest(TestCase):
def test_leftRightFromOne(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1])
def test_ruleFromOne(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1])
def test_leftRightSymbolFromOne(self):
self.assertEqual(OneRule.fromSymbol, 0)
self.assertEqual(OneRule.toSymbol, 1)
def test_leftRightFromTwoRight(self):
self.assertEqual(OneRule.left, [0])
self.assertEqual(OneRule.right, [1, 2])
def test_ruleFromTwoRight(self):
r = OneRule.rule
self.assertIsInstance(r, tuple)
self.assertEqual(r[0], [0])
self.assertEqual(r[1], [1, 2])
def test_leftRightSymbolFromTwoRight(self):
self.assertEqual(OneRule.fromSymbol, 0)
with self.assertRaises(NotASingleSymbolException):
x = OneRule.toSymbol
def test_leftRightFromTwo(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.left
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.right
def test_ruleFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
r = TwoRules.rule
def test_leftRightSymbolFromOne(self):
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.fromSymbol
with self.assertRaises(CantCreateSingleRuleException):
x = TwoRules.toSymbol
if __name__ == '__main__':
main()
|
fbfae080cc59e2faae4c8ece21e4aa2970efee1e
|
encmass.py
|
encmass.py
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = (r/rs)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y.value)).to(u.Msun)
return encmass
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = ((r/rs).to(u.dimensionless_unscaled).value)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y)/alpha).to(u.Msun)
return encmass
|
Fix error in enclosed mass calculation and fix unit handling.
|
Fix error in enclosed mass calculation and fix unit handling.
|
Python
|
bsd-2-clause
|
lauralwatkins/genhernquist
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = (r/rs)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y.value)).to(u.Msun)
return encmass
Fix error in enclosed mass calculation and fix unit handling.
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = ((r/rs).to(u.dimensionless_unscaled).value)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y)/alpha).to(u.Msun)
return encmass
|
<commit_before>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = (r/rs)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y.value)).to(u.Msun)
return encmass
<commit_msg>Fix error in enclosed mass calculation and fix unit handling.<commit_after>
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = ((r/rs).to(u.dimensionless_unscaled).value)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y)/alpha).to(u.Msun)
return encmass
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = (r/rs)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y.value)).to(u.Msun)
return encmass
Fix error in enclosed mass calculation and fix unit handling.#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = ((r/rs).to(u.dimensionless_unscaled).value)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y)/alpha).to(u.Msun)
return encmass
|
<commit_before>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = (r/rs)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y.value)).to(u.Msun)
return encmass
<commit_msg>Fix error in enclosed mass calculation and fix unit handling.<commit_after>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# GENHERNQUIST.ENCMASS
# Laura L Watkins [lauralwatkins@gmail.com]
# -----------------------------------------------------------------------------
import numpy as np
from astropy import units as u
from scipy import special
def encmass(r, norm, rs, alpha, beta, gamma):
"""
Enclosed mass profile of a generalised Hernquist model.
INPUTS
r : radial variable (requires unit)
norm : normalisation (requires unit)
rs : scale radius of model (requires unit)
alpha : sharpness of transition between inner and outer
beta : outer logarithmic slope
gamma : inner logarithmic slope
"""
a = (3.-gamma)/alpha
b = (gamma-beta)/alpha
y = ((r/rs).to(u.dimensionless_unscaled).value)**alpha
fn = lambda x: x**a * special.hyp2f1(a, -b, 1+a, -x)/a
encmass = (4*np.pi*norm*rs**3*fn(y)/alpha).to(u.Msun)
return encmass
|
5ee3eb2f68e4af8e70ea383b067fe67ffd4800bf
|
loadsbroker/webapp/__init__.py
|
loadsbroker/webapp/__init__.py
|
import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA})
])
|
import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA, "default_filename": "index.html"})
])
|
Set the `default_filename` for `GrafanaHandler`.
|
Set the `default_filename` for `GrafanaHandler`.
|
Python
|
apache-2.0
|
loads/loads-broker,loads/loads-broker,loads/loads-broker,loads/loads-broker
|
import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA})
])
Set the `default_filename` for `GrafanaHandler`.
|
import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA, "default_filename": "index.html"})
])
|
<commit_before>import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA})
])
<commit_msg>Set the `default_filename` for `GrafanaHandler`.<commit_after>
|
import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA, "default_filename": "index.html"})
])
|
import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA})
])
Set the `default_filename` for `GrafanaHandler`.import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA, "default_filename": "index.html"})
])
|
<commit_before>import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA})
])
<commit_msg>Set the `default_filename` for `GrafanaHandler`.<commit_after>import os
import tornado.web
from loadsbroker.webapp.api import RootHandler, RunHandler
from loadsbroker.webapp.views import GrafanaHandler
_GRAFANA = os.path.join(os.path.dirname(__file__), 'grafana')
application = tornado.web.Application([
(r"/api", RootHandler),
(r"/api/run/(.*)", RunHandler),
(r"/dashboards/run/(.*)", GrafanaHandler, {"path": _GRAFANA, "default_filename": "index.html"})
])
|
ddd8f1a8fab0f77943d7a47e1d154d1104add26e
|
ievv_opensource/ievv_batchframework/rq_tasks.py
|
ievv_opensource/ievv_batchframework/rq_tasks.py
|
from __future__ import absolute_import
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
|
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
|
Remove dependency on the future lib.
|
Remove dependency on the future lib.
|
Python
|
bsd-3-clause
|
appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource
|
from __future__ import absolute_import
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
Remove dependency on the future lib.
|
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
|
<commit_before>from __future__ import absolute_import
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
<commit_msg>Remove dependency on the future lib.<commit_after>
|
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
|
from __future__ import absolute_import
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
Remove dependency on the future lib.import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
|
<commit_before>from __future__ import absolute_import
import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
<commit_msg>Remove dependency on the future lib.<commit_after>import django_rq
from ievv_opensource.ievv_batchframework.models import BatchOperation
from ievv_opensource.ievv_batchframework import batchregistry
import logging
class BatchActionGroupTask(object):
abstract = True
def run_actiongroup(self, actiongroup_name, batchoperation_id, **kwargs):
try:
batchoperation = BatchOperation.objects\
.get(id=batchoperation_id, status=BatchOperation.STATUS_UNPROCESSED)
except BatchOperation.DoesNotExist:
logging.warning('BatchOperation with id={} does not exist, or is already running.')
return
else:
batchoperation.mark_as_running()
registry = batchregistry.Registry.get_instance()
full_kwargs = {
'started_by': batchoperation.started_by,
'context_object': batchoperation.context_object,
}
full_kwargs.update(kwargs)
actiongroupresult = registry.get_actiongroup(actiongroup_name)\
.run_blocking(**full_kwargs)
batchoperation.finish(failed=actiongroupresult.failed,
output_data=actiongroupresult.to_dict())
@django_rq.job('default')
def default(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
@django_rq.job('highpriority')
def highpriority(**kwargs):
BatchActionGroupTask().run_actiongroup(**kwargs)
|
1d74d333cdf6d25150afc93febc8141ea3c655b0
|
sirius/LI_V00/lattice.py
|
sirius/LI_V00/lattice.py
|
#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = [150e-9,300e-9] #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
|
#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = 150e-9 #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
|
Change Linac pulse duration interval to 150 ns
|
Change Linac pulse duration interval to 150 ns
|
Python
|
mit
|
lnls-fac/sirius
|
#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = [150e-9,300e-9] #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
Change Linac pulse duration interval to 150 ns
|
#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = 150e-9 #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
|
<commit_before>#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = [150e-9,300e-9] #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
<commit_msg>Change Linac pulse duration interval to 150 ns<commit_after>
|
#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = 150e-9 #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
|
#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = [150e-9,300e-9] #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
Change Linac pulse duration interval to 150 ns#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = 150e-9 #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
|
<commit_before>#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = [150e-9,300e-9] #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
<commit_msg>Change Linac pulse duration interval to 150 ns<commit_after>#!/usr/bin/env python3
import math as _math
import numpy as _np
import pyaccel as _pyaccel
import mathphys as _mp
from . import optics_mode_M0 as _optics_mode_M0
_default_optics_mode = _optics_mode_M0
_energy = 0.15e9 #[eV]
_emittance = 170.3329758677203e-09 #[m rad]
_energy_spread = 0.005
_single_bunch_charge = 1e-9 #[Coulomb]
_multi_bunch_charge = 3e-9 #[Coulomb]
_pulse_duration_interval = 150e-9 #[seconds]
_frequency = 3e9 #[Hz]
def create_lattice(**kwargs):
marker = _pyaccel.elements.marker
linac = marker('linac')
elist = [linac]
the_line = _pyaccel.lattice.build(elist)
return the_line
|
144264fcf06b24c8676e99bff5abb08e7bc936fb
|
comics/comics/fminus.py
|
comics/comics/fminus.py
|
from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2001-02-02"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
|
from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2005-05-10"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
|
Update history capability for "F Minus"
|
Update history capability for "F Minus"
|
Python
|
agpl-3.0
|
jodal/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics
|
from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2001-02-02"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
Update history capability for "F Minus"
|
from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2005-05-10"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
|
<commit_before>from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2001-02-02"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
<commit_msg>Update history capability for "F Minus"<commit_after>
|
from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2005-05-10"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
|
from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2001-02-02"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
Update history capability for "F Minus"from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2005-05-10"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
|
<commit_before>from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2001-02-02"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
<commit_msg>Update history capability for "F Minus"<commit_after>from comics.aggregator.crawler import GoComicsComCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "F Minus"
language = "en"
url = "http://www.gocomics.com/fminus"
start_date = "1999-09-01"
rights = "Tony Carrillo"
class Crawler(GoComicsComCrawlerBase):
history_capable_date = "2005-05-10"
schedule = "Mo,Tu,We,Th,Fr,Sa,Su"
time_zone = "US/Mountain"
def crawl(self, pub_date):
return self.crawl_helper("fminus", pub_date)
|
7168d7dc1695228e7711124395f41c3d52651a04
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.70.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
|
from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.71.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
|
Use boost 1.71.0 for conan
|
Use boost 1.71.0 for conan
https://github.com/conan-io/conan-center-index/issues/214#issuecomment-564074114
|
Python
|
mit
|
elsid/resource_pool,elsid/resource_pool,elsid/resource_pool
|
from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.70.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
Use boost 1.71.0 for conan
https://github.com/conan-io/conan-center-index/issues/214#issuecomment-564074114
|
from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.71.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
|
<commit_before>from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.70.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
<commit_msg>Use boost 1.71.0 for conan
https://github.com/conan-io/conan-center-index/issues/214#issuecomment-564074114<commit_after>
|
from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.71.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
|
from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.70.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
Use boost 1.71.0 for conan
https://github.com/conan-io/conan-center-index/issues/214#issuecomment-564074114from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.71.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
|
<commit_before>from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.70.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
<commit_msg>Use boost 1.71.0 for conan
https://github.com/conan-io/conan-center-index/issues/214#issuecomment-564074114<commit_after>from conans import ConanFile, CMake
from conans.tools import load
import re
def get_version():
try:
content = load("CMakeLists.txt")
version = re.search(r"^\s*project\(resource_pool\s+VERSION\s+([^\s]+)", content, re.M).group(1)
return version.strip()
except Exception:
return None
class ResourcePool(ConanFile):
name = 'resource_pool'
version = get_version()
license = 'MIT'
url = 'https://github.com/elsid/resource_pool'
description = 'Conan package for elsid resource pool'
exports_sources = 'include/*', 'CMakeLists.txt', 'resource_poolConfig.cmake', 'LICENCE', 'AUTHORS'
generators = 'cmake_paths'
requires = 'boost/1.71.0@conan/stable'
def _configure_cmake(self):
cmake = CMake(self)
cmake.configure()
return cmake
def build(self):
cmake = self._configure_cmake()
cmake.build()
def package(self):
cmake = self._configure_cmake()
cmake.install()
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.libs = ['resource_pool']
|
dd739126181b29493c9d1d90a7e40eac09c23666
|
app/models.py
|
app/models.py
|
# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
Add unique constraint to rid
|
Add unique constraint to rid
|
Python
|
mit
|
reubano/hdxscraper-hdro,reubano/hdxscraper-hdro,reubano/hdxscraper-hdro
|
# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
Add unique constraint to rid
|
# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
<commit_before># -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
<commit_msg>Add unique constraint to rid<commit_after>
|
# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
Add unique constraint to rid# -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
<commit_before># -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
<commit_msg>Add unique constraint to rid<commit_after># -*- coding: utf-8 -*-
"""
app.models
~~~~~~~~~~
Provides the SQLAlchemy models
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
import savalidation.validators as val
from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin
class HDRO(db.Model, ValidationMixin):
# auto keys
id = db.Column(db.Integer, primary_key=True)
utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
utc_updated = db.Column(
db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())
# other keys
rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
country = db.Column(db.String(32), nullable=False)
indicator = db.Column(db.String(128), nullable=False)
value = db.Column(db.Numeric, nullable=False)
year = db.Column(db.Integer, nullable=False)
# validation
val.validates_constraints()
def __repr__(self):
return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
|
4ac335e2ac69f634d51ab8b84805947fe2b87fc5
|
app.py
|
app.py
|
#!notify/bin/python3
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
|
#!notify/bin/python3
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
Migrate command line interface to hug
|
Migrate command line interface to hug
|
Python
|
isc
|
tildecross/tildex-notify
|
#!notify/bin/python3
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
Migrate command line interface to hug
|
#!notify/bin/python3
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
<commit_before>#!notify/bin/python3
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
<commit_msg>Migrate command line interface to hug<commit_after>
|
#!notify/bin/python3
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
#!notify/bin/python3
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
Migrate command line interface to hug#!notify/bin/python3
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
<commit_before>#!notify/bin/python3
import os
import sys
from pushbullet import Pushbullet
def create_note(title, content):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if len(sys.argv) >= 3:
title = sys.argv[1]
body = sys.argv[2]
create_note(title, body)
else:
print("Error: Missing arguments")
<commit_msg>Migrate command line interface to hug<commit_after>#!notify/bin/python3
import hug
import os
from pushbullet import Pushbullet
@hug.cli()
def create_note(title: hug.types.text, content: hug.types.text):
api_key = os.environ["PB_API_KEY"]
pb = Pushbullet(api_key)
pb.push_note(title, content)
if __name__ == '__main__':
create_note.interface.cli()
|
fe768f5d8c1081f69acd8cf656aa618da7caf93b
|
cbpos/mod/currency/views/config.py
|
cbpos/mod/currency/views/config.py
|
from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
from PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
Handle unset default currency better
|
Handle unset default currency better
|
Python
|
mit
|
coinbox/coinbox-mod-currency
|
from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
Handle unset default currency better
|
from PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
<commit_before>from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
<commit_msg>Handle unset default currency better<commit_after>
|
from PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
Handle unset default currency betterfrom PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
<commit_before>from PySide import QtGui
import cbpos
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = cbpos.config['mod.currency', 'default']
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
<commit_msg>Handle unset default currency better<commit_after>from PySide import QtGui
import cbpos
import cbpos.mod.currency.controllers as currency
from cbpos.mod.currency.models.currency import Currency
class CurrencyConfigPage(QtGui.QWidget):
label = 'Currency'
def __init__(self):
super(CurrencyConfigPage, self).__init__()
self.default = QtGui.QComboBox()
form = QtGui.QFormLayout()
form.setSpacing(10)
form.addRow('Default Currency', self.default)
self.setLayout(form)
def populate(self):
session = cbpos.database.session()
default_id = currency.default.id
selected_index = -1
self.default.clear()
for i, c in enumerate(session.query(Currency)):
self.default.addItem(c.display, c)
if default_id == c.id:
selected_index = i
self.default.setCurrentIndex(selected_index)
def update(self):
default = self.default.itemData(self.default.currentIndex())
cbpos.config['mod.currency', 'default'] = unicode(default.id)
|
2c2eac755245562446161e355d3436d6e662147c
|
notification/backends/email.py
|
notification/backends/email.py
|
from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 2
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
|
from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 3
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
|
Set notification.backends.EmailBackend.sensitivity = 3, so that it has a different sensitivity from the WebBackend
|
Set notification.backends.EmailBackend.sensitivity = 3, so that it has a different sensitivity from the WebBackend
|
Python
|
mit
|
theatlantic/django-notification,theatlantic/django-notification
|
from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 2
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
Set notification.backends.EmailBackend.sensitivity = 3, so that it has a different sensitivity from the WebBackend
|
from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 3
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
|
<commit_before>from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 2
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
<commit_msg>Set notification.backends.EmailBackend.sensitivity = 3, so that it has a different sensitivity from the WebBackend<commit_after>
|
from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 3
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
|
from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 2
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
Set notification.backends.EmailBackend.sensitivity = 3, so that it has a different sensitivity from the WebBackendfrom django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 3
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
|
<commit_before>from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 2
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
<commit_msg>Set notification.backends.EmailBackend.sensitivity = 3, so that it has a different sensitivity from the WebBackend<commit_after>from django.conf import settings
from django.core.mail import EmailMessage
from notification.backends.base import NotificationBackend
class EmailBackend(NotificationBackend):
sensitivity = 3
slug = u'email'
display_name = u'E-mail'
formats = ['short.txt', 'full.txt']
def email_for_user(self, recipient):
return recipient.email
def should_send(self, sender, recipient, notice_type, *args, **kwargs):
send = super(EmailBackend, self).should_send(sender, recipient,
notice_type)
return send and self.email_for_user(recipient) != ''
def render_subject(self, label, context):
# Strip newlines from subject
return ''.join(self.render_message(label,
'notification/email_subject.txt', 'short.txt', context
).splitlines())
def send(self, sender, recipient, notice_type, context, *args, **kwargs):
if not self.should_send(sender, recipient, notice_type):
return False
headers = kwargs.get('headers', {})
headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL)
EmailMessage(self.render_subject(notice_type.label, context),
self.render_message(notice_type.label,
'notification/email_body.txt',
'full.txt',
context),
kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL,
[self.email_for_user(recipient)],
headers=headers).send()
return True
|
85faea2a9185924d1255e84aad1489f7e3627d13
|
django_lightweight_queue/utils.py
|
django_lightweight_queue/utils.py
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
Add setproctitle wrapper so it's optional.
|
Add setproctitle wrapper so it's optional.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
prophile/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue,thread/django-lightweight-queue
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
Add setproctitle wrapper so it's optional.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
<commit_before>from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
<commit_msg>Add setproctitle wrapper so it's optional.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
Add setproctitle wrapper so it's optional.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
<commit_before>from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
<commit_msg>Add setproctitle wrapper so it's optional.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
694079aa480072e97043f968547941404f303c75
|
array/quick-sort.py
|
array/quick-sort.py
|
# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
|
# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
|
Apply recursion to quick sort
|
Apply recursion to quick sort
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
Apply recursion to quick sort
|
# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
|
<commit_before># Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
<commit_msg>Apply recursion to quick sort<commit_after>
|
# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
|
# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
Apply recursion to quick sort# Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
|
<commit_before># Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
<commit_msg>Apply recursion to quick sort<commit_after># Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
|
5345cfddc24303a7d27b61865306775f93bb908c
|
django_lightweight_queue/utils.py
|
django_lightweight_queue/utils.py
|
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
from django.db.models import get_apps
from django.core.exceptions import MiddlewareNotUsed
from django.utils.importlib import import_module
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
def import_all_submodules(name):
for app_module in get_apps():
parts = app_module.__name__.split('.')
prefix, last = parts[:-1], parts[-1]
try:
import_module('.'.join(prefix + [name]))
except ImportError:
# Distinguise between tasks.py existing and failing to import
if last == 'models':
app_module = import_module('.'.join(prefix))
if module_has_submodule(app_module, name):
raise
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
Add utility to import all submodules of all apps.
|
Add utility to import all submodules of all apps.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue,lamby/django-lightweight-queue
|
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
Add utility to import all submodules of all apps.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
from django.db.models import get_apps
from django.core.exceptions import MiddlewareNotUsed
from django.utils.importlib import import_module
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
def import_all_submodules(name):
for app_module in get_apps():
parts = app_module.__name__.split('.')
prefix, last = parts[:-1], parts[-1]
try:
import_module('.'.join(prefix + [name]))
except ImportError:
# Distinguise between tasks.py existing and failing to import
if last == 'models':
app_module = import_module('.'.join(prefix))
if module_has_submodule(app_module, name):
raise
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
<commit_before>from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
<commit_msg>Add utility to import all submodules of all apps.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>
|
from django.db.models import get_apps
from django.core.exceptions import MiddlewareNotUsed
from django.utils.importlib import import_module
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
def import_all_submodules(name):
for app_module in get_apps():
parts = app_module.__name__.split('.')
prefix, last = parts[:-1], parts[-1]
try:
import_module('.'.join(prefix + [name]))
except ImportError:
# Distinguise between tasks.py existing and failing to import
if last == 'models':
app_module = import_module('.'.join(prefix))
if module_has_submodule(app_module, name):
raise
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
Add utility to import all submodules of all apps.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>from django.db.models import get_apps
from django.core.exceptions import MiddlewareNotUsed
from django.utils.importlib import import_module
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
def import_all_submodules(name):
for app_module in get_apps():
parts = app_module.__name__.split('.')
prefix, last = parts[:-1], parts[-1]
try:
import_module('.'.join(prefix + [name]))
except ImportError:
# Distinguise between tasks.py existing and failing to import
if last == 'models':
app_module = import_module('.'.join(prefix))
if module_has_submodule(app_module, name):
raise
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
<commit_before>from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
<commit_msg>Add utility to import all submodules of all apps.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>from django.db.models import get_apps
from django.core.exceptions import MiddlewareNotUsed
from django.utils.importlib import import_module
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
def import_all_submodules(name):
for app_module in get_apps():
parts = app_module.__name__.split('.')
prefix, last = parts[:-1], parts[-1]
try:
import_module('.'.join(prefix + [name]))
except ImportError:
            # Distinguish between tasks.py existing and failing to import
if last == 'models':
app_module = import_module('.'.join(prefix))
if module_has_submodule(app_module, name):
raise
try:
import setproctitle
original_title = setproctitle.getproctitle()
def set_process_title(title):
setproctitle.setproctitle("%s [%s]" % (original_title, title))
except ImportError:
def set_process_title(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
5853c4449ff3e2ac04e96ab8c601609b4b24f267
|
flaskapp.py
|
flaskapp.py
|
import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
flag = dont_tread_on_memes.dont_me(caption)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
|
import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
# Color argument
color = flask.request.args.get("color")
if color is None:
color = "black"
# Allow disabling of formatting
should_format = flask.request.args.get("format")
if should_format == "false":
flag = dont_tread_on_memes.tread_on(caption, color)
else:
flag = dont_tread_on_memes.dont_me(caption, color)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
|
Implement some URL parameter options
|
Implement some URL parameter options
|
Python
|
mit
|
controversial/dont-tread-on-memes
|
import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
flag = dont_tread_on_memes.dont_me(caption)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
Implement some URL parameter options
|
import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
# Color argument
color = flask.request.args.get("color")
if color is None:
color = "black"
# Allow disabling of formatting
should_format = flask.request.args.get("format")
if should_format == "false":
flag = dont_tread_on_memes.tread_on(caption, color)
else:
flag = dont_tread_on_memes.dont_me(caption, color)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
flag = dont_tread_on_memes.dont_me(caption)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Implement some URL parameter options<commit_after>
|
import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
# Color argument
color = flask.request.args.get("color")
if color is None:
color = "black"
# Allow disabling of formatting
should_format = flask.request.args.get("format")
if should_format == "false":
flag = dont_tread_on_memes.tread_on(caption, color)
else:
flag = dont_tread_on_memes.dont_me(caption, color)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
|
import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
flag = dont_tread_on_memes.dont_me(caption)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
Implement some URL parameter optionsimport io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
# Color argument
color = flask.request.args.get("color")
if color is None:
color = "black"
# Allow disabling of formatting
should_format = flask.request.args.get("format")
if should_format == "false":
flag = dont_tread_on_memes.tread_on(caption, color)
else:
flag = dont_tread_on_memes.dont_me(caption, color)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
flag = dont_tread_on_memes.dont_me(caption)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Implement some URL parameter options<commit_after>import io
import dont_tread_on_memes
import flask
app = flask.Flask(__name__)
@app.route("/", defaults={"caption": "tread on"})
@app.route("/<caption>/")
def main(caption):
# Color argument
color = flask.request.args.get("color")
if color is None:
color = "black"
# Allow disabling of formatting
should_format = flask.request.args.get("format")
if should_format == "false":
flag = dont_tread_on_memes.tread_on(caption, color)
else:
flag = dont_tread_on_memes.dont_me(caption, color)
data = io.BytesIO()
flag.save(data, "PNG")
data.seek(0)
return flask.send_file(data, mimetype="image/png")
if __name__ == "__main__":
app.run(debug=True)
|
f225ffecf061470b877388d26c1605248b9611da
|
ygorcam.py
|
ygorcam.py
|
#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["picamera", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
    """web.py handler that captures a still photo with raspistill and serves it."""

    def GET(self):
        """Capture one JPEG frame and return its bytes with an image/jpeg header.

        Raises ``Exception`` with (returncode, stdout, stderr) if raspistill
        exits non-zero.
        """
        with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
            # raspistill writes the image into the temp file; stdout/stderr
            # must be piped, otherwise communicate() returns (None, None)
            # and the error report below carries no output.
            process = subprocess.Popen(
                ["raspistill", "-o", tfp.name],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = process.communicate()
            if process.returncode:
                raise Exception((process.returncode, stdout, stderr))
            web.header("Content-Type", "image/jpeg")
            return tfp.read()
if __name__ == "__main__":
app.run()
|
Use raspistill and provide some additional error info
|
Use raspistill and provide some additional error info
|
Python
|
mit
|
f0rk/ygorcam
|
#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["picamera", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
Use raspistill and provide some additional error info
|
#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["raspistill", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((process.returncode, stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
|
<commit_before>#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["picamera", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
<commit_msg>Use raspistill and provide some additional error info<commit_after>
|
#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["raspistill", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((process.returncode, stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["picamera", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
Use raspistill and provide some additional error info#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["raspistill", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((process.returncode, stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
|
<commit_before>#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["picamera", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
<commit_msg>Use raspistill and provide some additional error info<commit_after>#!/usr/bin/env python
import tempfile
import subprocess
import web
urls = ("/camera", "Camera")
app = web.application(urls, globals())
class Camera(object):
def GET(self):
with tempfile.NamedTemporaryFile(suffix=".jpg") as tfp:
process = subprocess.Popen(["raspistill", "-o", tfp.name])
stdout, stderr = process.communicate()
if process.returncode:
raise Exception((process.returncode, stdout, stderr))
web.header("Content-Type", "image/jpeg")
return tfp.read()
if __name__ == "__main__":
app.run()
|
7a290359a0800c8f94da2b3f74dcb7153c4c27ed
|
nomnom/tests/functional_tests.py
|
nomnom/tests/functional_tests.py
|
from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'NomNom Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
|
from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'Import Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
|
Correct button text in functional test
|
Correct button text in functional test
|
Python
|
mit
|
storyandstructure/django-nomnom
|
from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'NomNom Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
Correct button text in functional test
|
from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'Import Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
|
<commit_before>from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'NomNom Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
<commit_msg>Correct button text in functional test<commit_after>
|
from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'Import Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
|
from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'NomNom Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
Correct button text in functional testfrom django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'Import Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
|
<commit_before>from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'NomNom Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
<commit_msg>Correct button text in functional test<commit_after>from django_webtest import WebTest
from bs4 import BeautifulSoup
from django.contrib.admin.models import User
class NomnomTest(WebTest):
fixtures = ['users.json',]
def test_can_access_nomnom(self):
# An administrator visits the admin site
response = self.app.get('/admin/')
soup = BeautifulSoup('<html>%s' % response.html)
title = soup.find('title')
self.assertEqual('Log in | Django site admin', title.text)
# As a non-staff user, I cannot access nomnom's import page
nomnom_auth_groups = self.app.get('/nomnom/auth/group/import/')
self.assertContains(nomnom_auth_groups, text='id="login-form"', status_code=200)
# As an administrator, I can click the Import button so that I can
# import files.
user = self.app.get('/admin/auth/user/', user="admin")
assert 'Import Users' in user.click('Import users')
# As an administrator, I can click the Export button so that I can
# export files.
# user = self.app.get('/admin/auth/user')
# assert 'Export Users' in user.click('Export users')
self.fail('TODO')
|
cc62a1eea746a7191b4a07a48dcf55f4c76787ee
|
asyncpg/__init__.py
|
asyncpg/__init__.py
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
Use loop.create_future if it exists
|
Use loop.create_future if it exists
|
Python
|
apache-2.0
|
MagicStack/asyncpg,MagicStack/asyncpg
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
Use loop.create_future if it exists
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
<commit_before>import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
<commit_msg>Use loop.create_future if it exists<commit_after>
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
    """Thin async wrapper around a connected PostgreSQL ``Protocol`` instance."""

    def __init__(self, protocol, transport, loop):
        # protocol: the wire-protocol object driving the connection;
        # transport: the asyncio transport it is attached to;
        # loop: the event loop used to create result futures.
        self._protocol = protocol
        self._transport = transport
        self._loop = loop
    def get_settings(self):
        """Return the server settings reported by the protocol."""
        return self._protocol.get_settings()
    async def query(self, query):
        """Run a simple query and await its result via a waiter future."""
        waiter = _create_future(self._loop)
        self._protocol.query(query, waiter)
        return await waiter
    async def prepare(self, query):
        """Prepare *query* on the server and wrap the resulting statement state."""
        waiter = _create_future(self._loop)
        self._protocol.prepare(None, query, waiter)
        state = await waiter
        return PreparedStatement(self, state)
class PreparedStatement:
    """Handle to a server-side prepared statement bound to a Connection."""

    def __init__(self, connection, state):
        # connection: the owning Connection; state: protocol-level statement state.
        self._connection = connection
        self._state = state
    async def execute(self, *args):
        """Execute the prepared statement with *args* and await the result."""
        protocol = self._connection._protocol
        waiter = _create_future(self._connection._loop)
        protocol.execute(self._state, args, waiter)
        return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
                  loop=None):
    """Open a PostgreSQL connection and return a ready ``Connection``.

    If *loop* is None the current event loop is used.  The ``connected``
    future is resolved by the Protocol once authentication completes.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    connected = asyncio.Future(loop=loop)
    tr, pr = await loop.create_connection(
        lambda: Protocol(connected, user, loop),
        host, port)
    await connected
    return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
Use loop.create_future if it existsimport asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
<commit_before>import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
<commit_msg>Use loop.create_future if it exists<commit_after>import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
2b5d5d7f551d36af457ef357004dba2484e51572
|
spec_cleaner/rpmbuild.py
|
spec_cleaner/rpmbuild.py
|
# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
|
# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake') and not self.previous_line.startswith('# FIXME'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
|
Fix repetitive adding of cmake macro recommendation
|
Fix repetitive adding of cmake macro recommendation
|
Python
|
bsd-3-clause
|
plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner
|
# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
Fix repetitive adding of cmake macro recommendation
|
# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake') and not self.previous_line.startswith('# FIXME'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
|
<commit_before># vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
<commit_msg>Fix repetitive adding of cmake macro recommendation<commit_after>
|
# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake') and not self.previous_line.startswith('# FIXME'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
|
# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
Fix repetitive adding of cmake macro recommendation# vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake') and not self.previous_line.startswith('# FIXME'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
|
<commit_before># vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
<commit_msg>Fix repetitive adding of cmake macro recommendation<commit_after># vim: set ts=4 sw=4 et: coding=UTF-8
# We basically extend rpmcheck
from .rpmcheck import RpmCheck
class RpmBuild(RpmCheck):
"""
Replace various troublemakers in build phase
"""
def add(self, line):
# if user uses cmake directly just recommend him using the macros
if line.startswith('cmake') and not self.previous_line.startswith('# FIXME'):
self.lines.append('# FIXME: you should use %%cmake macros')
RpmCheck.add(self, line)
|
64b4c9e44c5305801f45e23efd2bc0843588afef
|
src/sentry/api/endpoints/organization_projects.py
|
src/sentry/api/endpoints/organization_projects.py
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
def get(self, request, organization):
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.base import DocSection
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
doc_section = DocSection.ORGANIZATIONS
def get(self, request, organization):
"""
List an organization's projects
Return a list of projects bound to a organization.
{method} {path}
"""
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
|
Add organization projects to docs
|
Add organization projects to docs
|
Python
|
bsd-3-clause
|
ngonzalvez/sentry,argonemyth/sentry,imankulov/sentry,wujuguang/sentry,JackDanger/sentry,llonchj/sentry,BayanGroup/sentry,felixbuenemann/sentry,ifduyue/sentry,gencer/sentry,jean/sentry,gencer/sentry,gencer/sentry,jean/sentry,1tush/sentry,wujuguang/sentry,BuildingLink/sentry,imankulov/sentry,kevinastone/sentry,looker/sentry,daevaorn/sentry,songyi199111/sentry,nicholasserra/sentry,ngonzalvez/sentry,JackDanger/sentry,BuildingLink/sentry,korealerts1/sentry,vperron/sentry,ngonzalvez/sentry,mitsuhiko/sentry,zenefits/sentry,llonchj/sentry,Natim/sentry,jokey2k/sentry,vperron/sentry,Kryz/sentry,ewdurbin/sentry,TedaLIEz/sentry,gencer/sentry,pauloschilling/sentry,alexm92/sentry,gg7/sentry,alexm92/sentry,JamesMura/sentry,ifduyue/sentry,beeftornado/sentry,korealerts1/sentry,TedaLIEz/sentry,imankulov/sentry,ifduyue/sentry,hongliang5623/sentry,JTCunning/sentry,wong2/sentry,looker/sentry,JamesMura/sentry,mvaled/sentry,pauloschilling/sentry,boneyao/sentry,zenefits/sentry,kevinlondon/sentry,mitsuhiko/sentry,kevinlondon/sentry,fuziontech/sentry,mvaled/sentry,ifduyue/sentry,jean/sentry,wujuguang/sentry,zenefits/sentry,Kryz/sentry,BuildingLink/sentry,JamesMura/sentry,ifduyue/sentry,wong2/sentry,mvaled/sentry,drcapulet/sentry,ewdurbin/sentry,jean/sentry,kevinastone/sentry,llonchj/sentry,wong2/sentry,felixbuenemann/sentry,jokey2k/sentry,looker/sentry,boneyao/sentry,Kryz/sentry,jokey2k/sentry,mvaled/sentry,songyi199111/sentry,daevaorn/sentry,argonemyth/sentry,fotinakis/sentry,looker/sentry,beeftornado/sentry,1tush/sentry,daevaorn/sentry,Natim/sentry,JamesMura/sentry,daevaorn/sentry,JTCunning/sentry,zenefits/sentry,alexm92/sentry,JamesMura/sentry,nicholasserra/sentry,drcapulet/sentry,BayanGroup/sentry,kevinlondon/sentry,vperron/sentry,korealerts1/sentry,beeftornado/sentry,boneyao/sentry,zenefits/sentry,JTCunning/sentry,JackDanger/sentry,kevinastone/sentry,BuildingLink/sentry,gg7/sentry,fuziontech/sentry,nicholasserra/sentry,looker/sentry,fotinakis/sentry,1tush/sentry,ewdurbin/sentry,fuzionte
ch/sentry,gencer/sentry,felixbuenemann/sentry,TedaLIEz/sentry,Natim/sentry,argonemyth/sentry,jean/sentry,fotinakis/sentry,songyi199111/sentry,mvaled/sentry,fotinakis/sentry,gg7/sentry,pauloschilling/sentry,drcapulet/sentry,hongliang5623/sentry,BayanGroup/sentry,mvaled/sentry,BuildingLink/sentry,hongliang5623/sentry
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
def get(self, request, organization):
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
Add organization projects to docs
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.base import DocSection
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
doc_section = DocSection.ORGANIZATIONS
def get(self, request, organization):
"""
List an organization's projects
Return a list of projects bound to a organization.
{method} {path}
"""
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
|
<commit_before>from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
def get(self, request, organization):
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
<commit_msg>Add organization projects to docs<commit_after>
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.base import DocSection
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
doc_section = DocSection.ORGANIZATIONS
def get(self, request, organization):
"""
List an organization's projects
Return a list of projects bound to a organization.
{method} {path}
"""
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
def get(self, request, organization):
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
Add organization projects to docsfrom __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.base import DocSection
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
doc_section = DocSection.ORGANIZATIONS
def get(self, request, organization):
"""
List an organization's projects
Return a list of projects bound to a organization.
{method} {path}
"""
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
|
<commit_before>from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
def get(self, request, organization):
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
<commit_msg>Add organization projects to docs<commit_after>from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api.base import DocSection
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.permissions import assert_perm
from sentry.api.serializers import serialize
from sentry.models import Project, Team
class OrganizationProjectsEndpoint(OrganizationEndpoint):
doc_section = DocSection.ORGANIZATIONS
def get(self, request, organization):
"""
List an organization's projects
Return a list of projects bound to a organization.
{method} {path}
"""
assert_perm(organization, request.user, request.auth)
team_list = Team.objects.get_for_user(
organization=organization,
user=request.user,
)
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(
team=team,
user=request.user,
))
project_list.sort(key=lambda x: x.name)
team_map = dict(
(t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)),
)
context = []
for project, pdata in zip(project_list, serialize(project_list, request.user)):
pdata['team'] = team_map[project.team_id]
context.append(pdata)
return Response(context)
|
1596e1cdd1a9907c6efa29e89b68f57e21b2fc01
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
|
Create documentation of DataSource Settings
|
: Create documentation of DataSource Settings
Task-Url:
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ): Create documentation of DataSource Settings
Task-Url:
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>: Create documentation of DataSource Settings
Task-Url: <commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ): Create documentation of DataSource Settings
Task-Url: ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>: Create documentation of DataSource Settings
Task-Url: <commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
|
ab8abaf874613a6d31ee0dad77f18e2cfc18db41
|
docs/conf.py
|
docs/conf.py
|
from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
|
from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosectionlabel',
]
autosectionlabel_prefix_document = True
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
|
Add sphinx autosection label plugin.
|
Add sphinx autosection label plugin.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
Add sphinx autosection label plugin.
|
from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosectionlabel',
]
autosectionlabel_prefix_document = True
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
|
<commit_before>from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
<commit_msg>Add sphinx autosection label plugin.<commit_after>
|
from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosectionlabel',
]
autosectionlabel_prefix_document = True
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
|
from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
Add sphinx autosection label plugin.from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosectionlabel',
]
autosectionlabel_prefix_document = True
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
|
<commit_before>from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
<commit_msg>Add sphinx autosection label plugin.<commit_after>from datetime import date
import guzzle_sphinx_theme
from pyinfra import __version__
copyright = 'Nick Barrett {0} — pyinfra v{1}'.format(
date.today().year,
__version__,
)
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosectionlabel',
]
autosectionlabel_prefix_document = True
extensions.append('guzzle_sphinx_theme')
source_suffix = ['.rst', '.md']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_short_title = 'Home'
html_theme = 'guzzle_sphinx_theme'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_static_path = ['static']
templates_path = ['templates']
html_favicon = 'static/logo_small.png'
html_sidebars = {
'**': ['pyinfra_sidebar.html', 'searchbox.html'],
}
|
44297159b6539987ed8fcdb50cd5b3e367a9cdc2
|
db/sql_server/pyodbc.py
|
db/sql_server/pyodbc.py
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
Add column support for sql server
|
Add column support for sql server
|
Python
|
apache-2.0
|
smartfile/django-south,smartfile/django-south
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
Add column support for sql server
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
<commit_before>from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
<commit_msg>Add column support for sql server<commit_after>
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
Add column support for sql serverfrom django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
<commit_before>from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
<commit_msg>Add column support for sql server<commit_after>from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
a0d0d1312d95e1d6df2b8463b9df3ff4178c2802
|
setup.py
|
setup.py
|
from distutils.core import setup
import glob,re
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt'])]
)
|
from distutils.core import setup
import glob,re, os
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
def get_test_files():
"""
Gets the unit tests and configs for them
"""
files = []
for root, subFolders, files in os.walk('tests'):
for name in files:
files.append(os.path.join(root,file))
return filter(lambda x: '.svn' not in x, files)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt']),
('/usr/share/osg-configure', get_test_files())]
)
|
Include test files when packaging osg-configure
|
Include test files when packaging osg-configure
git-svn-id: 8e6470fdf0410dbd375d7ca1c7e7b1f4e5857e13@14556 4e558342-562e-0410-864c-e07659590f8c
|
Python
|
apache-2.0
|
opensciencegrid/osg-configure,opensciencegrid/osg-configure,matyasselmeci/osg-configure,matyasselmeci/osg-configure
|
from distutils.core import setup
import glob,re
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt'])]
)
Include test files when packaging osg-configure
git-svn-id: 8e6470fdf0410dbd375d7ca1c7e7b1f4e5857e13@14556 4e558342-562e-0410-864c-e07659590f8c
|
from distutils.core import setup
import glob,re, os
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
def get_test_files():
"""
Gets the unit tests and configs for them
"""
files = []
for root, subFolders, files in os.walk('tests'):
for name in files:
files.append(os.path.join(root,file))
return filter(lambda x: '.svn' not in x, files)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt']),
('/usr/share/osg-configure', get_test_files())]
)
|
<commit_before>from distutils.core import setup
import glob,re
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt'])]
)
<commit_msg>Include test files when packaging osg-configure
git-svn-id: 8e6470fdf0410dbd375d7ca1c7e7b1f4e5857e13@14556 4e558342-562e-0410-864c-e07659590f8c<commit_after>
|
from distutils.core import setup
import glob,re, os
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
def get_test_files():
"""
Gets the unit tests and configs for them
"""
files = []
for root, subFolders, files in os.walk('tests'):
for name in files:
files.append(os.path.join(root,file))
return filter(lambda x: '.svn' not in x, files)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt']),
('/usr/share/osg-configure', get_test_files())]
)
|
from distutils.core import setup
import glob,re
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt'])]
)
Include test files when packaging osg-configure
git-svn-id: 8e6470fdf0410dbd375d7ca1c7e7b1f4e5857e13@14556 4e558342-562e-0410-864c-e07659590f8cfrom distutils.core import setup
import glob,re, os
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
def get_test_files():
"""
Gets the unit tests and configs for them
"""
files = []
for root, subFolders, files in os.walk('tests'):
for name in files:
files.append(os.path.join(root,file))
return filter(lambda x: '.svn' not in x, files)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt']),
('/usr/share/osg-configure', get_test_files())]
)
|
<commit_before>from distutils.core import setup
import glob,re
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt'])]
)
<commit_msg>Include test files when packaging osg-configure
git-svn-id: 8e6470fdf0410dbd375d7ca1c7e7b1f4e5857e13@14556 4e558342-562e-0410-864c-e07659590f8c<commit_after>from distutils.core import setup
import glob,re, os
config_files = glob.glob('config/*.ini')
def get_version():
"""
Gets version from osg-configure script file
"""
buffer = open('scripts/osg-configure').read()
match = re.search("VERSION\s+=\s+'(.*)'", buffer)
return match.group(1)
def get_test_files():
"""
Gets the unit tests and configs for them
"""
files = []
for root, subFolders, files in os.walk('tests'):
for name in files:
files.append(os.path.join(root,file))
return filter(lambda x: '.svn' not in x, files)
setup(name='osg-configure',
version=get_version(),
description='Package for osg-configure and associated scripts',
author='Suchandra Thapa',
author_email='sthapa@ci.uchicago.edu',
url='http://www.opensciencegrid.org',
packages=['osg_configure', 'osg_configure.modules', 'osg_configure.configure_modules'],
scripts=['scripts/osg-configure'],
data_files=[('/etc/osg/config.d', config_files),
('/etc/osg/', ['data_files/grid3-locations.txt']),
('/usr/share/osg-configure', get_test_files())]
)
|
52c02ceda3c6430a2f4bbb3f9054180699baaa93
|
setup.py
|
setup.py
|
# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import sys
from setuptools import setup, Extension
from Cython.Distutils import build_ext
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko', 'Pyrex', 'Cython']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
packages=['darc'],
cmdclass = {'build_ext': build_ext},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', ['darc/hashindex.pyx', 'darc/_hashindex.c'])],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
|
# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import os
import sys
from glob import glob
from setuptools import setup, Extension
from setuptools.command.sdist import sdist
hashindex_sources = ['darc/hashindex.pyx', 'darc/_hashindex.c']
try:
from Cython.Distutils import build_ext
import Cython.Compiler.Main as cython_compiler
class Sdist(sdist):
def __init__(self, *args, **kwargs):
for src in glob('darc/*.pyx'):
print 'src', src
cython_compiler.compile(glob('darc/*.pyx'),
cython_compiler.default_options)
sdist.__init__(self, *args, **kwargs)
def run(self):
sdist.run(self)
self.filelist.append('darc/hashindex.c', 'darc/hashindex.h')
except ImportError:
hashindex_sources[0] = hashindex_sources[0].replace('.pyx', '.c')
from setuptools.command.build_ext import build_ext
Sdist = sdist
if not os.path.exists('darc/hashindex.c'):
raise ImportError('The GIT version of darc needs Cython. Install Cython or use a released version')
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
url='http://github.com/jborg/darc/',
packages=['darc'],
cmdclass = {'build_ext': build_ext, 'sdist': Sdist},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', hashindex_sources)],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
|
Include Cython output in sdist
|
Include Cython output in sdist
|
Python
|
bsd-3-clause
|
ionelmc/borg,pombredanne/attic,edgimar/borg,raxenak/borg,RonnyPfannschmidt/borg,RonnyPfannschmidt/borg,edgimar/borg,mhubig/borg,edgewood/borg,RonnyPfannschmidt/borg,edgewood/borg,raxenak/borg,RonnyPfannschmidt/borg,ionelmc/borg,RonnyPfannschmidt/borg,mhubig/borg,level323/borg,jborg/attic,jborg/attic,mhubig/borg,ionelmc/borg,edgimar/borg,raxenak/borg,level323/borg,edgewood/borg,Teino1978-Corp/Teino1978-Corp-attic,Teino1978-Corp/Teino1978-Corp-attic,raxenak/borg,edgewood/borg,edgimar/borg,pombredanne/attic,level323/borg
|
# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import sys
from setuptools import setup, Extension
from Cython.Distutils import build_ext
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko', 'Pyrex', 'Cython']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
packages=['darc'],
cmdclass = {'build_ext': build_ext},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', ['darc/hashindex.pyx', 'darc/_hashindex.c'])],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
Include Cython output in sdist
|
# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import os
import sys
from glob import glob
from setuptools import setup, Extension
from setuptools.command.sdist import sdist
hashindex_sources = ['darc/hashindex.pyx', 'darc/_hashindex.c']
try:
from Cython.Distutils import build_ext
import Cython.Compiler.Main as cython_compiler
class Sdist(sdist):
def __init__(self, *args, **kwargs):
for src in glob('darc/*.pyx'):
print 'src', src
cython_compiler.compile(glob('darc/*.pyx'),
cython_compiler.default_options)
sdist.__init__(self, *args, **kwargs)
def run(self):
sdist.run(self)
self.filelist.append('darc/hashindex.c', 'darc/hashindex.h')
except ImportError:
hashindex_sources[0] = hashindex_sources[0].replace('.pyx', '.c')
from setuptools.command.build_ext import build_ext
Sdist = sdist
if not os.path.exists('darc/hashindex.c'):
raise ImportError('The GIT version of darc needs Cython. Install Cython or use a released version')
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
url='http://github.com/jborg/darc/',
packages=['darc'],
cmdclass = {'build_ext': build_ext, 'sdist': Sdist},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', hashindex_sources)],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
|
<commit_before># -*- encoding: utf-8 *-*
#!/usr/bin/env python
import sys
from setuptools import setup, Extension
from Cython.Distutils import build_ext
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko', 'Pyrex', 'Cython']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
packages=['darc'],
cmdclass = {'build_ext': build_ext},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', ['darc/hashindex.pyx', 'darc/_hashindex.c'])],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
<commit_msg>Include Cython output in sdist<commit_after>
|
# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import os
import sys
from glob import glob
from setuptools import setup, Extension
from setuptools.command.sdist import sdist
hashindex_sources = ['darc/hashindex.pyx', 'darc/_hashindex.c']
try:
from Cython.Distutils import build_ext
import Cython.Compiler.Main as cython_compiler
class Sdist(sdist):
def __init__(self, *args, **kwargs):
for src in glob('darc/*.pyx'):
print 'src', src
cython_compiler.compile(glob('darc/*.pyx'),
cython_compiler.default_options)
sdist.__init__(self, *args, **kwargs)
def run(self):
sdist.run(self)
self.filelist.append('darc/hashindex.c', 'darc/hashindex.h')
except ImportError:
hashindex_sources[0] = hashindex_sources[0].replace('.pyx', '.c')
from setuptools.command.build_ext import build_ext
Sdist = sdist
if not os.path.exists('darc/hashindex.c'):
raise ImportError('The GIT version of darc needs Cython. Install Cython or use a released version')
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
url='http://github.com/jborg/darc/',
packages=['darc'],
cmdclass = {'build_ext': build_ext, 'sdist': Sdist},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', hashindex_sources)],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
|
# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import sys
from setuptools import setup, Extension
from Cython.Distutils import build_ext
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko', 'Pyrex', 'Cython']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
packages=['darc'],
cmdclass = {'build_ext': build_ext},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', ['darc/hashindex.pyx', 'darc/_hashindex.c'])],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
Include Cython output in sdist# -*- encoding: utf-8 *-*
#!/usr/bin/env python
import os
import sys
from glob import glob
from setuptools import setup, Extension
from setuptools.command.sdist import sdist
hashindex_sources = ['darc/hashindex.pyx', 'darc/_hashindex.c']
try:
from Cython.Distutils import build_ext
import Cython.Compiler.Main as cython_compiler
class Sdist(sdist):
def __init__(self, *args, **kwargs):
for src in glob('darc/*.pyx'):
print 'src', src
cython_compiler.compile(glob('darc/*.pyx'),
cython_compiler.default_options)
sdist.__init__(self, *args, **kwargs)
def run(self):
sdist.run(self)
self.filelist.append('darc/hashindex.c', 'darc/hashindex.h')
except ImportError:
hashindex_sources[0] = hashindex_sources[0].replace('.pyx', '.c')
from setuptools.command.build_ext import build_ext
Sdist = sdist
if not os.path.exists('darc/hashindex.c'):
raise ImportError('The GIT version of darc needs Cython. Install Cython or use a released version')
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
url='http://github.com/jborg/darc/',
packages=['darc'],
cmdclass = {'build_ext': build_ext, 'sdist': Sdist},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', hashindex_sources)],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
|
<commit_before># -*- encoding: utf-8 *-*
#!/usr/bin/env python
import sys
from setuptools import setup, Extension
from Cython.Distutils import build_ext
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko', 'Pyrex', 'Cython']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
packages=['darc'],
cmdclass = {'build_ext': build_ext},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', ['darc/hashindex.pyx', 'darc/_hashindex.c'])],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
<commit_msg>Include Cython output in sdist<commit_after># -*- encoding: utf-8 *-*
#!/usr/bin/env python
import os
import sys
from glob import glob
from setuptools import setup, Extension
from setuptools.command.sdist import sdist
hashindex_sources = ['darc/hashindex.pyx', 'darc/_hashindex.c']
try:
from Cython.Distutils import build_ext
import Cython.Compiler.Main as cython_compiler
class Sdist(sdist):
def __init__(self, *args, **kwargs):
for src in glob('darc/*.pyx'):
print 'src', src
cython_compiler.compile(glob('darc/*.pyx'),
cython_compiler.default_options)
sdist.__init__(self, *args, **kwargs)
def run(self):
sdist.run(self)
self.filelist.append('darc/hashindex.c', 'darc/hashindex.h')
except ImportError:
hashindex_sources[0] = hashindex_sources[0].replace('.pyx', '.c')
from setuptools.command.build_ext import build_ext
Sdist = sdist
if not os.path.exists('darc/hashindex.c'):
raise ImportError('The GIT version of darc needs Cython. Install Cython or use a released version')
dependencies = ['pycrypto', 'msgpack-python', 'pbkdf2.py', 'xattr', 'paramiko']
if sys.version_info < (2, 7):
dependencies.append('argparse')
setup(name='darc',
version='0.1',
author='Jonas Borgström',
author_email='jonas@borgstrom.se',
url='http://github.com/jborg/darc/',
packages=['darc'],
cmdclass = {'build_ext': build_ext, 'sdist': Sdist},
ext_modules=[
Extension('darc._speedups', ['darc/_speedups.c']),
Extension('darc.hashindex', hashindex_sources)],
install_requires=dependencies,
entry_points = {
'console_scripts': [
'darc = darc.archiver:main',
]
})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.