| column | dtype | min length | max length |
|---|---|---|---|
| commit | string | 40 | 40 |
| old_file | string | 4 | 118 |
| new_file | string | 4 | 118 |
| old_contents | string | 0 | 2.94k |
| new_contents | string | 1 | 4.43k |
| subject | string | 15 | 444 |
| message | string | 16 | 3.45k |
| lang | string (1 class) | | |
| license | string (13 classes) | | |
| repos | string | 5 | 43.2k |
| prompt | string | 17 | 4.58k |
| response | string | 1 | 4.43k |
| prompt_tagged | string | 58 | 4.62k |
| response_tagged | string | 1 | 4.43k |
| text | string | 132 | 7.29k |
| text_tagged | string | 173 | 7.33k |
---

commit: 8774517714c8c8a7f7a2be9316a23497adfa9f59
old_file: pi_gpio/urls.py
new_file: pi_gpio/urls.py
old_contents:
    from pi_gpio import app, socketio
    from flask.ext import restful
    from flask import render_template
    from handlers import PinList, PinDetail

    api = restful.Api(app)
    api.add_resource(PinList, '/api/v1/pin')
    api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')

    import RPi.GPIO as GPIO


    def event_callback(pin):
        socketio.emit('pin:event', {"message":"woohoo!"})


    @app.route('/', defaults={'path': ''})
    @app.route('/<path:path>')
    def index(path):
        GPIO.add_event_detect(23, GPIO.RISING, callback=event_callback)
        return render_template('index.html')
new_contents:

    from pi_gpio import app, socketio
    from flask.ext import restful
    from flask import render_template
    from handlers import PinList, PinDetail
    from events import PinEventManager

    api = restful.Api(app)
    api.add_resource(PinList, '/api/v1/pin')
    api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')


    @app.route('/', defaults={'path': ''})
    @app.route('/<path:path>')
    def index(path):
        PinEventManager()
        return render_template('index.html')
subject: Call event manager in index route
message: Call event manager in index route
lang: Python
license: mit
repos: projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server
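
The change here is more than cosmetic: the old code called `GPIO.add_event_detect(23, ...)` inside the index route, so every page load re-registered the callback, and RPi.GPIO raises a RuntimeError when event detection is already enabled on a pin. The new code delegates to a `PinEventManager` imported from `events`, which this record does not show; the following is only a plausible sketch of such a manager, with the pin number and event payload carried over from the old inline callback rather than taken from the actual repository:

```python
import RPi.GPIO as GPIO
from pi_gpio import socketio


class PinEventManager(object):
    """Hypothetical sketch of the manager this commit introduces."""

    registered = set()  # pins whose edge detection is already enabled

    def __init__(self, pins=(23,)):
        for pin in pins:
            if pin not in self.registered:
                # Register once per pin, avoiding RPi.GPIO's RuntimeError
                # on duplicate add_event_detect calls.
                GPIO.add_event_detect(pin, GPIO.RISING,
                                      callback=self.emit_event)
                self.registered.add(pin)

    def emit_event(self, pin):
        # Same behavior as the old inline callback.
        socketio.emit('pin:event', {"message": "woohoo!"})
```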
---

commit: 5a36bfb8bb8eceab57203387072f3bf492b2a418
old_file: src/onixcheck/exeptions.py
new_file: src/onixcheck/exeptions.py
old_contents:
    # -*- coding: utf-8 -*-


    class OnixError(Exception):
        pass
new_contents:

    # -*- coding: utf-8 -*-
    import logging


    class OnixError(Exception):
        pass


    class NullHandler(logging.Handler):
        """Not in python 2.6 so we use our own"""

        def emit(self, record):
            pass


    def get_logger(logger_name='onixcheck', add_null_handler=True):
        logger = logging.getLogger(logger_name)
        if add_null_handler:
            logger.addHandler(NullHandler())
        return logger
subject: Add NullHandler to silence errors when logging without configuration
message: Add NullHandler to silence errors when logging without configuration
lang: Python
license: bsd-2-clause
repos: titusz/onixcheck
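
This is the standard library-logging convention: on Python 2.6, a logger with no handlers prints "No handlers could be found for logger ..." to stderr the first time it is used, and `logging.NullHandler` only arrived in 2.7, hence the local copy. A minimal usage sketch (the module path is taken from this record; the calls around it are illustrative):

```python
from onixcheck.exeptions import get_logger

log = get_logger()   # a NullHandler is attached, so no stderr complaint
log.warning('swallowed unless the application configures logging')

# An application that does configure logging sees messages as usual:
import logging
logging.basicConfig(level=logging.INFO)
log.warning('now emitted through the root handler')
```

One caveat worth noting: as written, every `get_logger()` call appends another `NullHandler` to the same logger, which is harmless but accumulates handlers.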
---

commit: 8ab254490dac4f4ebfed1f43d615c321b5890e29
old_file: xmlrpclib_to/__init__.py
new_file: xmlrpclib_to/__init__.py
old_contents:
    try:
        import xmlrpclib
        from xmlrpclib import *
    except ImportError:
        # Python 3.0 portability fix...
        import xmlrpc.client as xmlrpclib
        from xmlrpc.client import *

    import httplib
    import socket


    class ServerProxy(xmlrpclib.ServerProxy):
        def __init__(self, uri, transport=None, encoding=None, verbose=0,
                     allow_none=0, use_datetime=0, timeout=None):
            if timeout is not None:
                transport = TimeoutTransport(use_datetime, timeout)
            xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose,
                                           allow_none, use_datetime)


    class TimeoutTransport(xmlrpclib.Transport):
        def __init__(self, use_datetime=0, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
            xmlrpclib.Transport.__init__(self, use_datetime)
            self.timeout = timeout

        def make_connection(self, host):
            if self._connection and host == self._connection[0]:
                return self._connection[1]
            chost, self._extra_headers, x509 = self.get_host_info(host)
            self._connection = host, httplib.HTTPConnection(
                chost, timeout=self.timeout
            )
            return self._connection[1]
new_contents:

    try:
        import xmlrpclib
        from xmlrpclib import *
    except ImportError:
        # Python 3.0 portability fix...
        import xmlrpc.client as xmlrpclib
        from xmlrpc.client import *

    import httplib
    import socket


    class ServerProxy(xmlrpclib.ServerProxy):
        def __init__(self, uri, transport=None, encoding=None, verbose=0,
                     allow_none=0, use_datetime=0, timeout=None):
            if timeout is not None:
                if uri.startswith('http://'):
                    secure = False
                elif uri.startswith('https://'):
                    secure = True
                transport = TimeoutTransport(use_datetime, timeout, secure=secure)
            xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose,
                                           allow_none, use_datetime)


    class TimeoutTransport(xmlrpclib.Transport):
        def __init__(self, use_datetime=0, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                     secure=False):
            xmlrpclib.Transport.__init__(self, use_datetime)
            self.timeout = timeout
            self.secure = secure

        def make_connection(self, host):
            if self._connection and host == self._connection[0]:
                return self._connection[1]
            chost, self._extra_headers, x509 = self.get_host_info(host)
            if self.secure:
                self._connection = host, httplib.HTTPSConnection(
                    chost, None, timeout=self.timeout, **(x509 or {})
                )
            else:
                self._connection = host, httplib.HTTPConnection(
                    chost, timeout=self.timeout
                )
            return self._connection[1]
subject: FIX working with HTTPS correctly
message: FIX working with HTTPS correctly
lang: Python
license: mit
repos: gisce/xmlrpclib-to
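
The fix matters because the old transport always built an `httplib.HTTPConnection`, so any `https://` endpoint used with a timeout was silently given a plain-HTTP connection object. One edge remains in the committed code: `secure` is only assigned inside the `if`/`elif`, so a URI with any other scheme would raise UnboundLocalError when the transport is constructed. A hedged hardening sketch, not part of the commit:

```python
# Default from the scheme check itself, so unknown schemes fall back to
# plain HTTP instead of raising UnboundLocalError:
if timeout is not None:
    secure = uri.startswith('https://')
    transport = TimeoutTransport(use_datetime, timeout, secure=secure)
```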
---

commit: 4348927a9ed5bdcbf0284086103e927f45091e15
old_file: saau/utils/header.py
new_file: saau/utils/header.py
old_contents:
    import numpy as np
    from lxml.etree import fromstring, XMLSyntaxError


    def parse_lines(lines):
        for line in lines:
            try:
                xml_line = fromstring(line.encode('utf-8'))
            except XMLSyntaxError:
                attrs = []
            else:
                attrs = [thing.tag for thing in xml_line.getiterator()]
                line = list(xml_line.getiterator())[-1].text
            yield line, attrs


    def render_header_to(font, ax, sy, lines, sx=0.5):
        calc = lambda q: q / 20
        y_points = map(calc, np.arange(sy, 0, -0.5))
        for y, (text, attrs) in zip(y_points, parse_lines(lines)):
            line = ax.figure.text(sx, y, text, ha='center')
            if 'b' in attrs:
                line.set_weight('extra bold')
                line.set_font_properties(font)
                line.set_fontsize(25)
            if 'i' in attrs:
                line.set_style('italic')
        return ax
new_contents:

    import numpy as np
    from lxml.etree import fromstring, XMLSyntaxError


    def parse_lines(lines):
        for line in lines:
            try:
                xml_line = fromstring(line.encode('utf-8'))
            except XMLSyntaxError:
                attrs = []
            else:
                attrs = [thing.tag for thing in xml_line.getiterator()]
                line = list(xml_line.getiterator())[-1].text
            yield line, attrs


    def render_header_to(font, ax, sy, lines, sx=0.5):
        y_points = (
            q / 20
            for q in np.arange(sy, 0, -0.5)
        )
        for y, (text, attrs) in zip(y_points, parse_lines(lines)):
            line = ax.figure.text(sx, y, text, ha='center')
            if 'b' in attrs:
                line.set_weight('extra bold')
                line.set_font_properties(font)
                line.set_fontsize(25)
            if 'i' in attrs:
                line.set_style('italic')
        return ax
subject: Clean up y index generation
message: Clean up y index generation
lang: Python
license: mit
repos: Mause/statistical_atlas_of_au
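
The `map(lambda ...)` form and the generator expression are equivalent here; both are lazy on Python 3, and the rewrite simply drops the throwaway `calc` name. A quick check of the equivalence (the `sy` value is illustrative):

```python
import numpy as np

sy = 2.0
via_map = list(map(lambda q: q / 20, np.arange(sy, 0, -0.5)))
via_genexp = list(q / 20 for q in np.arange(sy, 0, -0.5))
assert via_map == via_genexp   # both give [0.1, 0.075, 0.05, 0.025]
```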
---

commit: ebd6d12ca16003e771a7015505be1b42d96483a3
old_file: roles/gvl.commandline-utilities/templates/jupyterhub_config.py
new_file: roles/gvl.commandline-utilities/templates/jupyterhub_config.py
old_contents:
    # Configuration file for jupyterhub.

    #------------------------------------------------------------------------------
    # Configurable configuration
    #------------------------------------------------------------------------------

    #------------------------------------------------------------------------------
    # JupyterHub configuration
    #------------------------------------------------------------------------------

    # An Application for starting a Multi-User Jupyter Notebook server.

    # The public facing ip of the proxy
    c.JupyterHub.ip = '127.0.0.1'

    # The ip for the proxy API handlers
    c.JupyterHub.proxy_api_ip = '127.0.0.1'

    # The public facing port of the proxy
    c.JupyterHub.port = 9510

    # The base URL of the entire application
    c.JupyterHub.base_url = '/jupyterhub'

    # The ip for this process
    c.JupyterHub.hub_ip = '127.0.0.1'

    # put the log file in /var/log
    c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'

    #------------------------------------------------------------------------------
    # Spawner configuration
    #------------------------------------------------------------------------------

    # The IP address (or hostname) the single-user server should listen on
    c.Spawner.ip = '127.0.0.1'

    #------------------------------------------------------------------------------
    # Authenticator configuration
    #------------------------------------------------------------------------------

    # A class for authentication.
    #
    # The API is one method, `authenticate`, a tornado gen.coroutine.

    # set of usernames of admin users
    #
    # If unspecified, only the user that launches the server will be admin.
    c.Authenticator.admin_users = {'root', 'ubuntu'}
new_contents:

    # Configuration file for jupyterhub.

    #------------------------------------------------------------------------------
    # Configurable configuration
    #------------------------------------------------------------------------------

    #------------------------------------------------------------------------------
    # JupyterHub configuration
    #------------------------------------------------------------------------------

    # An Application for starting a Multi-User Jupyter Notebook server.

    # The public facing ip of the proxy
    c.JupyterHub.ip = '127.0.0.1'

    # The ip for the proxy API handlers
    c.JupyterHub.proxy_api_ip = '127.0.0.1'

    # The public facing port of the proxy
    c.JupyterHub.port = 9510

    # The base URL of the entire application
    c.JupyterHub.base_url = '/jupyterhub'

    # The ip for this process
    c.JupyterHub.hub_ip = '127.0.0.1'

    # put the log file in /var/log
    c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'
    c.JupyterHub.log_level = 'WARN'

    #------------------------------------------------------------------------------
    # Spawner configuration
    #------------------------------------------------------------------------------

    # The IP address (or hostname) the single-user server should listen on
    c.Spawner.ip = '127.0.0.1'

    #------------------------------------------------------------------------------
    # Authenticator configuration
    #------------------------------------------------------------------------------

    # A class for authentication.
    #
    # The API is one method, `authenticate`, a tornado gen.coroutine.

    # set of usernames of admin users
    #
    # If unspecified, only the user that launches the server will be admin.
    c.Authenticator.admin_users = {'root', 'ubuntu'}
subject: Set log level to 'WARN'
message: Set log level to 'WARN'
lang: Python
license: mit
repos: gvlproject/gvl_commandline_utilities,nuwang/gvl_commandline_utilities,claresloggett/gvl_commandline_utilities,nuwang/gvl_commandline_utilities,claresloggett/gvl_commandline_utilities,gvlproject/gvl_commandline_utilities
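
`log_level` here is the standard traitlets Application setting, which (as far as I know) accepts either a level name or the numeric logging constant, so `'WARN'` suppresses the INFO/DEBUG chatter otherwise written to /var/log/jupyterhub.log:

```python
import logging

# Equivalent spellings, assuming traitlets' usual Application.log_level:
c.JupyterHub.log_level = 'WARN'
c.JupyterHub.log_level = logging.WARN   # numeric 30, same effect
```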
---

commit: 9676024c92348ed52c78620f2a8b0b4cd104430d
old_file: location.py
new_file: location.py
old_contents:
    # -*- coding: utf-8 -*-
    """
    location.py
    """
    from trytond.pool import PoolMeta
    from trytond.model import fields
    from trytond.pyson import Eval

    __all__ = ['Location']
    __metaclass__ = PoolMeta


    class Location:
        __name__ = "stock.location"

        return_address = fields.Many2One(
            "party.address", "Return Address", states={
                'invisible': Eval('type') != 'warehouse',
                'readonly': ~Eval('active'),
            }, depends=['type', 'active'],
            help="Return address to print on shipping label"
        )
new_contents:

    # -*- coding: utf-8 -*-
    """
    location.py
    """
    from trytond.pool import PoolMeta
    from trytond.model import fields
    from trytond.pyson import Eval

    __all__ = ['Location']
    __metaclass__ = PoolMeta


    class Location:
        __name__ = "stock.location"

        return_address = fields.Many2One(
            "party.address", "Return Address", states={
                'invisible': Eval('type') != 'warehouse',
                'readonly': ~Eval('active'),
            }, depends=['type', 'active'],
            help="Return undelivered shipments to this address"
        )
subject: Rename help text of return address field
message: Rename help text of return address field
lang: Python
license: bsd-3-clause
repos: joeirimpan/trytond-shipping,trytonus/trytond-shipping,fulfilio/trytond-shipping,prakashpp/trytond-shipping,tarunbhardwaj/trytond-shipping
---

commit: 1b58d032fc04b4791bae1448f031dde87bc4766e
old_file: flow/configuration/parser.py
new_file: flow/configuration/parser.py
old_contents:
    import argparse


    def create_parser(valid_command_names):
        parser = argparse.ArgumentParser()
        parser.add_argument('command', choices=valid_command_names)
        return parser


    def parse_arguments(command_class):
        parser = create_parser([command_class._name])
        command_class.annotate_parser(parser)
        return parser.parse_args()
new_contents:

    import argparse


    def create_parser(valid_command_names):
        parser = argparse.ArgumentParser()
        parser.add_argument('command', choices=valid_command_names)
        return parser


    def parse_arguments(command_class):
        parser = create_parser([command_class.name])
        command_class.annotate_parser(parser)
        return parser.parse_args()
subject: Fix command_class _name to name
message: Fix command_class _name to name
lang: Python
license: agpl-3.0
repos: genome/flow-core,genome/flow-core,genome/flow-core
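
The bug was a reference to a private `_name` attribute that the command classes evidently expose as `name`; note also that `choices=[command_class.name]` means the positional `command` argument accepts only that one value. A hypothetical command class, purely to illustrate the contract `parse_arguments` expects:

```python
class StatusCommand(object):
    # Illustrative only; not a class from the flow codebase.
    name = 'status'   # the fixed code reads .name, not ._name

    @staticmethod
    def annotate_parser(parser):
        parser.add_argument('--verbose', action='store_true')


# e.g. invoked as: python some_entry_point.py status --verbose
args = parse_arguments(StatusCommand)
print(args.command, args.verbose)
```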
---

commit: 783948e6d5ce9f4a8cbdbecd4731615381ca89c0
old_file: scripts/set_alpha.py
new_file: scripts/set_alpha.py
old_contents:
    #!/usr/bin/env python
    import sys

    alpha_deg = sys.argv[1]

    with open("system/fvOptions", "w") as f:
        with open("system/fvOptions.template") as template:
            txt = template.read()
            f.write(txt.format(alpha_deg=alpha_deg))
new_contents:

    #!/usr/bin/env python
    import sys

    if len(sys.argv) > 1:
        alpha_deg = sys.argv[1]
    else:
        alpha_deg = 4.0

    with open("system/fvOptions", "w") as f:
        with open("system/fvOptions.template") as template:
            txt = template.read()
            f.write(txt.format(alpha_deg=alpha_deg))
subject: Add a default angle of attack
message: Add a default angle of attack
lang: Python
license: mit
repos: petebachant/actuatorLine-2D-turbinesFoam,petebachant/actuatorLine-2D-turbinesFoam,petebachant/actuatorLine-2D-turbinesFoam
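
Note the small type wrinkle the default introduces: `sys.argv[1]` stays a string while the fallback is the float `4.0`; plain `{alpha_deg}` formatting renders both, but a numeric format spec such as `{alpha_deg:.1f}` in the template would fail on the string path. A hedged argparse variant that normalizes both paths to float:

```python
import argparse

# Sketch only; the script as committed reads sys.argv directly.
parser = argparse.ArgumentParser(description="Write angle of attack into fvOptions")
parser.add_argument("alpha_deg", nargs="?", type=float, default=4.0)
alpha_deg = parser.parse_args().alpha_deg
```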
---

commit: a7b92bdbb4c71a33896105022e70c69c3bc33861
old_file: patterns/gradient.py
new_file: patterns/gradient.py
old_contents:
    from blinkytape import color


    class Gradient(object):
        def __init__(self, pixel_count, start_color, end_color):
            self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)

        @property
        def pixels(self):
            return list(self._pixels)

        def _rgb_gradient(self, pixel_count, start_color, end_color):
            red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
            green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
            blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
            rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
            return [color.Color(*rgb) for rgb in rgb_gradient]

        def _gradient(self, start, end, count):
            delta = (end - start) / float(count - 1)
            return [start + (delta * index) for index in range(0, count)]
from blinkytape import color
class Gradient(object):
# TBD: If this had a length it would also work as a streak; consider
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
|
Add another TBD for future reference
|
Add another TBD for future reference
|
Python
|
mit
|
jonspeicher/blinkyfun
|
from blinkytape import color
class Gradient(object):
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
Add another TBD for future reference
|
from blinkytape import color
class Gradient(object):
# TBD: If this had a length it would also work as a streak; consider
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
|
<commit_before>from blinkytape import color
class Gradient(object):
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
<commit_msg>Add another TBD for future reference<commit_after>
|
from blinkytape import color
class Gradient(object):
# TBD: If this had a length it would also work as a streak; consider
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
|
from blinkytape import color
class Gradient(object):
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
Add another TBD for future referencefrom blinkytape import color
class Gradient(object):
# TBD: If this had a length it would also work as a streak; consider
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
|
<commit_before>from blinkytape import color
class Gradient(object):
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
<commit_msg>Add another TBD for future reference<commit_after>from blinkytape import color
class Gradient(object):
# TBD: If this had a length it would also work as a streak; consider
def __init__(self, pixel_count, start_color, end_color):
self._pixels = self._rgb_gradient(pixel_count, start_color, end_color)
@property
def pixels(self):
return list(self._pixels)
def _rgb_gradient(self, pixel_count, start_color, end_color):
red_gradient = self._gradient(start_color.red, end_color.red, pixel_count)
green_gradient = self._gradient(start_color.green, end_color.green, pixel_count)
blue_gradient = self._gradient(start_color.blue, end_color.blue, pixel_count)
rgb_gradient = zip(red_gradient, green_gradient, blue_gradient)
return [color.Color(*rgb) for rgb in rgb_gradient]
def _gradient(self, start, end, count):
delta = (end - start) / float(count - 1)
return [start + (delta * index) for index in range(0, count)]
|
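Usage sketch for the Gradient class above, assuming the module imports as patterns.gradient and that blinkytape's Color takes red/green/blue positional values (both are assumptions):

from blinkytape import color
from patterns.gradient import Gradient  # assumed import path

# 60-pixel fade from red to blue; endpoints are included.
fade = Gradient(60, color.Color(255, 0, 0), color.Color(0, 0, 255))
for px in fade.pixels:
    print(px.red, px.green, px.blue)  # components may be floats here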
d1a8f9c5423bf78ff59c6a439f21148d29da1caa
|
server/proxy_util.py
|
server/proxy_util.py
|
#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
# TODO: Create the next page
start = datetime.datetime.now()
# TODO: Retrieve only the current page
data = self._get(self.har)
# TODO: Delete the current page and move to the next one
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return json.loads(content, encoding='utf8')
|
#!/usr/bin/env python
import datetime
import logging
import requests
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self._proxy_har = '{}/har'.format(proxy_root)
self._proxy_har_pageref = '{}/har/pageRef'.format(proxy_root)
def get_next_page(self):
start = datetime.datetime.now()
next_pageref = self.pageref + 1
rp = requests.put(self._proxy_har_pageref,
data={'pageRef': next_pageref})
rp.raise_for_status()
rg = requests.get('{}?pageRef={}'.format(self._proxy_har,
self.pageref))
rg.raise_for_status()
rd = requests.delete('{}/{}'.format(self._proxy_har_pageref,
self.pageref))
rd.raise_for_status()
self.pageref = next_pageref
end = datetime.datetime.now()
# No Content-Length header?
content_size = len(rg.text)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return rg.json(encoding='utf8')
|
Support creating a next page, retrieve only the current page, and delete the current page and move to the next one. Switch to requests library for PUT and DELETE methods.
|
Support creating a next page, retrieve only the current page, and delete
the current page and move to the next one.
Switch to requests library for PUT and DELETE methods.
|
Python
|
apache-2.0
|
kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa
|
#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
# TODO: Create the next page
start = datetime.datetime.now()
# TODO: Retrieve only the current page
data = self._get(self.har)
# TODO: Delete the current page and move to the next one
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return json.loads(content, encoding='utf8')
Support creating a next page, retrieve only the current page, and delete
the current page and move to the next one.
Switch to requests library for PUT and DELETE methods.
|
#!/usr/bin/env python
import datetime
import logging
import requests
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self._proxy_har = '{}/har'.format(proxy_root)
self._proxy_har_pageref = '{}/har/pageRef'.format(proxy_root)
def get_next_page(self):
start = datetime.datetime.now()
next_pageref = self.pageref + 1
rp = requests.put(self._proxy_har_pageref,
data={'pageRef': next_pageref})
rp.raise_for_status()
rg = requests.get('{}?pageRef={}'.format(self._proxy_har,
self.pageref))
rg.raise_for_status()
rd = requests.delete('{}/{}'.format(self._proxy_har_pageref,
self.pageref))
rd.raise_for_status()
self.pageref = next_pageref
end = datetime.datetime.now()
# No Content-Length header?
content_size = len(rg.text)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return rg.json(encoding='utf8')
|
<commit_before>#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
# TODO: Create the next page
start = datetime.datetime.now()
# TODO: Retrieve only the current page
data = self._get(self.har)
# TODO: Delete the current page and move to the next one
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return json.loads(content, encoding='utf8')
<commit_msg>Support creating a next page, retrieve only the current page, and delete
the current page and move to the next one.
Switch to requests library for PUT and DELETE methods.<commit_after>
|
#!/usr/bin/env python
import datetime
import logging
import requests
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self._proxy_har = '{}/har'.format(proxy_root)
self._proxy_har_pageref = '{}/har/pageRef'.format(proxy_root)
def get_next_page(self):
start = datetime.datetime.now()
next_pageref = self.pageref + 1
rp = requests.put(self._proxy_har_pageref,
data={'pageRef': next_pageref})
rp.raise_for_status()
rg = requests.get('{}?pageRef={}'.format(self._proxy_har,
self.pageref))
rg.raise_for_status()
rd = requests.delete('{}/{}'.format(self._proxy_har_pageref,
self.pageref))
rd.raise_for_status()
self.pageref = next_pageref
end = datetime.datetime.now()
# No Content-Length header?
content_size = len(rg.text)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return rg.json(encoding='utf8')
|
#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
# TODO: Create the next page
start = datetime.datetime.now()
# TODO: Retrieve only the current page
data = self._get(self.har)
# TODO: Delete the current page and move to the next one
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return json.loads(content, encoding='utf8')
Support creating a next page, retrieve only the current page, and delete
the current page and move to the next one.
Switch to requests library for PUT and DELETE methods.#!/usr/bin/env python
import datetime
import logging
import requests
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self._proxy_har = '{}/har'.format(proxy_root)
self._proxy_har_pageref = '{}/har/pageRef'.format(proxy_root)
def get_next_page(self):
start = datetime.datetime.now()
next_pageref = self.pageref + 1
rp = requests.put(self._proxy_har_pageref,
data={'pageRef': next_pageref})
rp.raise_for_status()
rg = requests.get('{}?pageRef={}'.format(self._proxy_har,
self.pageref))
rg.raise_for_status()
rd = requests.delete('{}/{}'.format(self._proxy_har_pageref,
self.pageref))
rd.raise_for_status()
self.pageref = next_pageref
end = datetime.datetime.now()
# No Content-Length header?
content_size = len(rg.text)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return rg.json(encoding='utf8')
|
<commit_before>#!/usr/bin/env python
import datetime
import json
import logging
import urllib2
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self.har = '{}/har'.format(proxy_root)
self.har_pageref = '{}/har/pageref'
def _get(self, url):
try:
return urllib2.urlopen(url)
except urllib2.URLError as e:
self._logger.error('Proxy error: {}'.format(e))
return None
def get_next_page(self):
# TODO: Create the next page
start = datetime.datetime.now()
# TODO: Retrieve only the current page
data = self._get(self.har)
# TODO: Delete the current page and move to the next one
if not data:
return None
end = datetime.datetime.now()
content = data.read()
# No Content-Length header?
content_size = len(content)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return json.loads(content, encoding='utf8')
<commit_msg>Support creating a next page, retrieve only the current page, and delete
the current page and move to the next one.
Switch to requests library for PUT and DELETE methods.<commit_after>#!/usr/bin/env python
import datetime
import logging
import requests
class HarManager(object):
def __init__(self, args):
self._logger = logging.getLogger('kcaa.proxy_util')
self.pageref = 1
proxy_root = 'http://{}/proxy/{}'.format(args.proxy_controller,
args.proxy.partition(':')[2])
self._proxy_har = '{}/har'.format(proxy_root)
self._proxy_har_pageref = '{}/har/pageRef'.format(proxy_root)
def get_next_page(self):
start = datetime.datetime.now()
next_pageref = self.pageref + 1
rp = requests.put(self._proxy_har_pageref,
data={'pageRef': next_pageref})
rp.raise_for_status()
rg = requests.get('{}?pageRef={}'.format(self._proxy_har,
self.pageref))
rg.raise_for_status()
rd = requests.delete('{}/{}'.format(self._proxy_har_pageref,
self.pageref))
rd.raise_for_status()
self.pageref = next_pageref
end = datetime.datetime.now()
# No Content-Length header?
content_size = len(rg.text)
self._logger.debug('Poke HAR ({:.1f} KiB) in {:.2f} seconds.'.format(
(1.0 / 1024) * content_size, (end - start).total_seconds()))
# HAR content should always be encoded in UTF-8, according to the spec.
return rg.json(encoding='utf8')
|
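A construction sketch for the requests-based manager above; host/port values are placeholders and assume a BrowserMob-style proxy controller is actually listening:

import argparse

args = argparse.Namespace(
    proxy_controller='localhost:9090',  # placeholder controller host:port
    proxy='localhost:9091',             # placeholder proxy host:port
)
manager = HarManager(args)     # assumes HarManager imported from proxy_util
har = manager.get_next_page()  # fetches the current page, then rotates pageRef
print(len(har.get('log', {}).get('entries', [])), 'entries captured')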
9e9910346f7bacdc2a4fc2e92ecb8237bf38275e
|
plumbium/environment.py
|
plumbium/environment.py
|
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
Stop pylint complaining about bare-except
|
Stop pylint complaining about bare-except
|
Python
|
mit
|
jstutters/Plumbium
|
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
Stop pylint complaining about bare-except
|
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
<commit_before>"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
<commit_msg>Stop pylint complaining about bare-except<commit_after>
|
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
Stop pylint complaining about bare-except"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
<commit_before>"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
<commit_msg>Stop pylint complaining about bare-except<commit_after>"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
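Example call for the function above; note that pip.get_installed_distributions() was removed in pip 10, which is exactly the failure mode the bare except absorbs:

from plumbium.environment import get_environment  # assumed import path

env = get_environment()
print(env['hostname'])
print(len(env.get('python_packages', [])), 'packages recorded')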
ed5dcd72b661878913be224d641c5595c73ef049
|
tests/test_auditory.py
|
tests/test_auditory.py
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
|
Test of the erb calculation
|
Test of the erb calculation
|
Python
|
bsd-3-clause
|
achabotl/pambox
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
Test of the erb calculation
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
|
<commit_before>from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
<commit_msg>Test of the erb calculation<commit_after>
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
Test of the erb calculationfrom __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
|
<commit_before>from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
<commit_msg>Test of the erb calculation<commit_after>from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
|
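For reference, the pinned value matches the Glasberg & Moore ERB formula; this sketch of what erbbw plausibly computes is an assumption — only the 132.63 Hz check comes from the test itself:

def erbbw(fc):
    # Glasberg & Moore (1990): ERB(f) = 24.7 * (4.37 * f / 1000 + 1)
    return 24.7 * (4.37 * fc / 1000.0 + 1)

print(round(erbbw(1000), 2))  # 132.64, within the test's rtol=1e-4 of 132.63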
721f18da4d38ac76171165596bc11e2572c60204
|
algebra.py
|
algebra.py
|
"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
Fix bug where vector calculations returned Ints only
|
Fix bug where vector calculations returned Ints only
|
Python
|
mit
|
supermitch/clipycube
|
"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
Fix bug where vector calculations returned Ints only
|
"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
<commit_before>"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
<commit_msg>Fix bug where vector calculations returned Ints only<commit_after>
|
"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
Fix bug where vector calculations returned Ints only"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
<commit_before>"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
<commit_msg>Fix bug where vector calculations returned Ints only<commit_after>"""
Linear algebra is cool.
"""
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
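A quick check of why the fix matters: int() truncates toward zero, so a 90-degree z-rotation of the vector (1, 1, 0) silently lost its x component; round(..., 1) keeps it. Hypothetical usage:

from algebra import rotation  # assumed import path

print(rotation((1, 1, 0), 'z'))   # -> (-1.0, 1.0, 0)
# Old version: int(cos(pi/2)*1 - sin(pi/2)*1) == int(-0.999...) == 0, not -1.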
ec1474d9144ead23b335472d6c4623f5e712e88d
|
run.py
|
run.py
|
import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True)
|
import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
Set app host to 0.0.0.0
|
Set app host to 0.0.0.0
|
Python
|
mit
|
kxxoling/horus,kxxoling/horus,kxxoling/horus
|
import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True)
Set app host to 0.0.0.0
|
import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
<commit_before>import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Set app host to 0.0.0.0<commit_after>
|
import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True)
Set app host to 0.0.0.0import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
<commit_before>import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Set app host to 0.0.0.0<commit_after>import os
from horus.apps import create_app
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, 'config.py')
app = create_app(config_file)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
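Binding to 0.0.0.0 exposes the dev server on every interface; a hedged variant keeping localhost as the default (the HORUS_HOST variable name is invented for illustration):

import os

host = os.environ.get('HORUS_HOST', '127.0.0.1')  # hypothetical override
if __name__ == '__main__':
    app.run(debug=True, host=host)  # app built via create_app as above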
9f82fe03a38d9eaf4ccd22f2ee6d13907bc3b42e
|
relay_api/api/server.py
|
relay_api/api/server.py
|
from flask import Flask, jsonify
server = Flask(__name__)
def get_relays(relays):
return jsonify({"relays": relays}), 200
def get_relay(relays, relay_name):
code = 200
try:
relay = relays[relay_name]
except KeyError:
code = 404
return "", code
return jsonify({"relay": relay}), code
|
from flask import Flask, jsonify
# import json
server = Flask(__name__)
def __serialize_relay(relays):
if type(relays).__name__ == "relay":
return jsonify({"gpio": relays.gpio,
"NC": relays.nc,
"state": relays.state})
di = {}
for r in relays:
di[r] = {"gpio": relays[r].gpio,
"NC": relays[r].nc,
"state": relays[r].state}
return jsonify(di)
def get_relays(relays_dict):
return __serialize_relay(relays_dict), 200
def get_relay(relay):
code = 200
if not relay:
code = 404
return "", code
return __serialize_relay(relay), code
|
Change to get a dict with the relay instances
|
Change to get a dict with the relay instances
|
Python
|
mit
|
pahumadad/raspi-relay-api
|
from flask import Flask, jsonify
server = Flask(__name__)
def get_relays(relays):
return jsonify({"relays": relays}), 200
def get_relay(relays, relay_name):
code = 200
try:
relay = relays[relay_name]
except KeyError:
code = 404
return "", code
return jsonify({"relay": relay}), code
Change to get a dict with the relay instances
|
from flask import Flask, jsonify
# import json
server = Flask(__name__)
def __serialize_relay(relays):
if type(relays).__name__ == "relay":
return jsonify({"gpio": relays.gpio,
"NC": relays.nc,
"state": relays.state})
di = {}
for r in relays:
di[r] = {"gpio": relays[r].gpio,
"NC": relays[r].nc,
"state": relays[r].state}
return jsonify(di)
def get_relays(relays_dict):
return __serialize_relay(relays_dict), 200
def get_relay(relay):
code = 200
if not relay:
code = 404
return "", code
return __serialize_relay(relay), code
|
<commit_before>from flask import Flask, jsonify
server = Flask(__name__)
def get_relays(relays):
return jsonify({"relays": relays}), 200
def get_relay(relays, relay_name):
code = 200
try:
relay = relays[relay_name]
except KeyError:
code = 404
return "", code
return jsonify({"relay": relay}), code
<commit_msg>Change to get a dict with the relay instances<commit_after>
|
from flask import Flask, jsonify
# import json
server = Flask(__name__)
def __serialize_relay(relays):
if type(relays).__name__ == "relay":
return jsonify({"gpio": relays.gpio,
"NC": relays.nc,
"state": relays.state})
di = {}
for r in relays:
di[r] = {"gpio": relays[r].gpio,
"NC": relays[r].nc,
"state": relays[r].state}
return jsonify(di)
def get_relays(relays_dict):
return __serialize_relay(relays_dict), 200
def get_relay(relay):
code = 200
if not relay:
code = 404
return "", code
return __serialize_relay(relay), code
|
from flask import Flask, jsonify
server = Flask(__name__)
def get_relays(relays):
return jsonify({"relays": relays}), 200
def get_relay(relays, relay_name):
code = 200
try:
relay = relays[relay_name]
except KeyError:
code = 404
return "", code
return jsonify({"relay": relay}), code
Change to get a dict with the relay instancesfrom flask import Flask, jsonify
# import json
server = Flask(__name__)
def __serialize_relay(relays):
if type(relays).__name__ == "relay":
return jsonify({"gpio": relays.gpio,
"NC": relays.nc,
"state": relays.state})
di = {}
for r in relays:
di[r] = {"gpio": relays[r].gpio,
"NC": relays[r].nc,
"state": relays[r].state}
return jsonify(di)
def get_relays(relays_dict):
return __serialize_relay(relays_dict), 200
def get_relay(relay):
code = 200
if not relay:
code = 404
return "", code
return __serialize_relay(relay), code
|
<commit_before>from flask import Flask, jsonify
server = Flask(__name__)
def get_relays(relays):
return jsonify({"relays": relays}), 200
def get_relay(relays, relay_name):
code = 200
try:
relay = relays[relay_name]
except KeyError:
code = 404
return "", code
return jsonify({"relay": relay}), code
<commit_msg>Change to get a dict with the relay instances<commit_after>from flask import Flask, jsonify
# import json
server = Flask(__name__)
def __serialize_relay(relays):
if type(relays).__name__ == "relay":
return jsonify({"gpio": relays.gpio,
"NC": relays.nc,
"state": relays.state})
di = {}
for r in relays:
di[r] = {"gpio": relays[r].gpio,
"NC": relays[r].nc,
"state": relays[r].state}
return jsonify(di)
def get_relays(relays_dict):
return __serialize_relay(relays_dict), 200
def get_relay(relay):
code = 200
if not relay:
code = 404
return "", code
return __serialize_relay(relay), code
|
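The type check keys on the class literally being named relay, so a usage sketch needs that exact lowercase name; every identifier below is illustrative:

from relay_api.api.server import server, get_relays  # assumed import path

class relay:  # lowercase matters: type(...).__name__ == 'relay'
    def __init__(self, gpio, nc, state):
        self.gpio, self.nc, self.state = gpio, nc, state

relays = {'pump': relay(17, True, 0), 'light': relay(27, False, 1)}
with server.test_request_context():  # jsonify needs an app context
    body, code = get_relays(relays)
    print(code, body.get_json())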
967a82011c2a8e154c8386dfd0499dc5cea06da1
|
sheldon/bot.py
|
sheldon/bot.py
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.error_log_message('Error with loading config:')
logger.error_log_message(str(error.__traceback__))
|
Add load config function to Sheldon class
|
Add load config function to Sheldon class
|
Python
|
mit
|
lises/sheldon
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
Add load config function to Sheldon class
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.error_log_message('Error with loading config:')
logger.error_log_message(str(error.__traceback__))
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
<commit_msg>Add load config function to Sheldon class<commit_after>
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.error_log_message('Error with loading config:')
logger.error_log_message(str(error.__traceback__))
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
        Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
Add load config function to Sheldon class# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
        Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.error_log_message('Error with loading config:')
logger.error_log_message(str(error.__traceback__))
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
        Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
<commit_msg>Add load config function to Sheldon class<commit_after># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
        Create and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.error_log_message('Error with loading config:')
logger.error_log_message(str(error.__traceback__))
|
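For context on the Config(prefix=...) call above: the actual sheldon.config.Config is not included in this record, so the following is only a minimal sketch, assuming the class merely collects environment variables that share a prefix (the class body and get() helper are guesses, not the project's real implementation):

import os

class Config:
    # Hypothetical sketch only; the real sheldon.config.Config is not shown here.
    def __init__(self, prefix='SHELDON_'):
        # Keep every environment variable that starts with the prefix,
        # using the lower-cased remainder of its name as the lookup key.
        self.settings = {
            name[len(prefix):].lower(): value
            for name, value in os.environ.items()
            if name.startswith(prefix)
        }

    def get(self, key, default=None):
        return self.settings.get(key, default)

Under that assumption, the docstring's default prefix becomes concrete: an environment variable SHELDON_TOKEN would surface as config.get('token').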
b07f4997b72702023721786de425533db38b5867
|
vsub/urls.py
|
vsub/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
(r'^/?$', TemplateView.as_view(template_name='index.html')),
)
urlpatterns += staticfiles_urlpatterns()
|
Set the default URL to point to index.html.
|
Set the default URL to point to index.html.
|
Python
|
mit
|
PrecisionMojo/pm-www,PrecisionMojo/pm-www
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Set the default URL to point to index.html.
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
(r'^/?$', TemplateView.as_view(template_name='index.html')),
)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Set the default URL to point to index.html.<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
(r'^/?$', TemplateView.as_view(template_name='index.html')),
)
urlpatterns += staticfiles_urlpatterns()
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Set the default URL to point to index.html.from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
(r'^/?$', TemplateView.as_view(template_name='index.html')),
)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Set the default URL to point to index.html.<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
(r'^/?$', TemplateView.as_view(template_name='index.html')),
)
urlpatterns += staticfiles_urlpatterns()
|
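A quick way to confirm that the new root route really serves index.html is Django's test client; this is a hedged sketch assuming a project configured with the urlconf above and a resolvable index.html template:

from django.test import Client

client = Client()
response = client.get('/')
assert response.status_code == 200
# TemplateView records the template it rendered on the test response.
assert 'index.html' in [t.name for t in response.templates]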
b8796c355bc8a763dbd2a5b6c5ed88a61f91eab7
|
tests/test_conditionals.py
|
tests/test_conditionals.py
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog and not cat")
""").output == """dog is dog\ndog is not cat""".strip()
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise if "dog" eq "dog"
Output.write("dog is still dog")
otherwise
Output.write("dog is not dog and not cat")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "Dog"
Output.write("dog is Dog")
otherwise if "dog" eq "mouse"
Output.write("dog is mouse")
otherwise
Output.write("dog is not cat and not mouse and not Dog")
""").output == """dog is dog\ndog is not cat and not mouse and not Dog""".strip()
|
Update conditional else branch tests
|
Update conditional else branch tests
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog and not cat")
""").output == """dog is dog\ndog is not cat""".strip()
Update conditional else branch tests
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise if "dog" eq "dog"
Output.write("dog is still dog")
otherwise
Output.write("dog is not dog and not cat")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "Dog"
Output.write("dog is Dog")
otherwise if "dog" eq "mouse"
Output.write("dog is mouse")
otherwise
Output.write("dog is not cat and not mouse and not Dog")
""").output == """dog is dog\ndog is not cat and not mouse and not Dog""".strip()
|
<commit_before>import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog and not cat")
""").output == """dog is dog\ndog is not cat""".strip()
<commit_msg>Update conditional else branch tests<commit_after>
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise if "dog" eq "dog"
Output.write("dog is still dog")
otherwise
Output.write("dog is not dog and not cat")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "Dog"
Output.write("dog is Dog")
otherwise if "dog" eq "mouse"
Output.write("dog is mouse")
otherwise
Output.write("dog is not cat and not mouse and not Dog")
""").output == """dog is dog\ndog is not cat and not mouse and not Dog""".strip()
|
import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog and not cat")
""").output == """dog is dog\ndog is not cat""".strip()
Update conditional else branch testsimport pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise if "dog" eq "dog"
Output.write("dog is still dog")
otherwise
Output.write("dog is not dog and not cat")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "Dog"
Output.write("dog is Dog")
otherwise if "dog" eq "mouse"
Output.write("dog is mouse")
otherwise
Output.write("dog is not cat and not mouse and not Dog")
""").output == """dog is dog\ndog is not cat and not mouse and not Dog""".strip()
|
<commit_before>import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog and not cat")
""").output == """dog is dog\ndog is not cat""".strip()
<commit_msg>Update conditional else branch tests<commit_after>import pytest
from thinglang.runner import run
def test_simple_conditionals():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
if "dog" eq "cat"
Output.write("dog is cat")
""").output == """dog is dog""".strip()
def test_unconditional_else():
assert run("""
thing Program
does start
if "dog" eq "dog"
Output.write("dog is dog")
otherwise
Output.write("dog is not dog")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise
Output.write("dog is not cat")
""").output == """dog is dog\ndog is not cat""".strip()
def test_conditional_else():
assert run("""
thing Program
does start
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "dog"
Output.write("dog is dog")
otherwise if "dog" eq "dog"
Output.write("dog is still dog")
otherwise
Output.write("dog is not dog and not cat")
if "dog" eq "cat"
Output.write("dog is cat")
otherwise if "dog" eq "Dog"
Output.write("dog is Dog")
otherwise if "dog" eq "mouse"
Output.write("dog is mouse")
otherwise
Output.write("dog is not cat and not mouse and not Dog")
""").output == """dog is dog\ndog is not cat and not mouse and not Dog""".strip()
|
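The if / otherwise if / otherwise chain exercised by the last test maps directly onto Python's if / elif / else, which makes the expected output easy to verify by hand; a rough Python equivalent of the second chain in the test:

dog = 'dog'
if dog == 'cat':
    print('dog is cat')
elif dog == 'Dog':      # 'eq' on strings is case-sensitive, so this is False
    print('dog is Dog')
elif dog == 'mouse':
    print('dog is mouse')
else:
    print('dog is not cat and not mouse and not Dog')

Only the final branch runs, matching the second line of the asserted output.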
cb1591a4c614d6ecbd4ad2cbed2a736fe14f2428
|
mopidy_beets/actor.py
|
mopidy_beets/actor.py
|
from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def change_track(self, track):
track_id = track.uri.split(';')[1]
logger.debug(
'Getting info for track %s with id %s' % (track.uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return super(BeetsPlaybackProvider, self).play(track)
|
from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track_id = uri.split(';')[1]
logger.debug('Getting info for track %s with id %s' % (uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return track.uri
|
Use translate_uri() instead of change_track()
|
playback: Use translate_uri() instead of change_track()
|
Python
|
mit
|
mopidy/mopidy-beets
|
from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def change_track(self, track):
track_id = track.uri.split(';')[1]
logger.debug(
'Getting info for track %s with id %s' % (track.uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return super(BeetsPlaybackProvider, self).play(track)
playback: Use translate_uri() instead of change_track()
|
from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track_id = uri.split(';')[1]
logger.debug('Getting info for track %s with id %s' % (uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return track.uri
|
<commit_before>from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def change_track(self, track):
track_id = track.uri.split(';')[1]
logger.debug(
'Getting info for track %s with id %s' % (track.uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return super(BeetsPlaybackProvider, self).play(track)
<commit_msg>playback: Use translate_uri() instead of change_track()<commit_after>
|
from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track_id = uri.split(';')[1]
logger.debug('Getting info for track %s with id %s' % (uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return track.uri
|
from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def change_track(self, track):
track_id = track.uri.split(';')[1]
logger.debug(
'Getting info for track %s with id %s' % (track.uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return super(BeetsPlaybackProvider, self).play(track)
playback: Use translate_uri() instead of change_track()from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track_id = uri.split(';')[1]
logger.debug('Getting info for track %s with id %s' % (uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return track.uri
|
<commit_before>from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def change_track(self, track):
track_id = track.uri.split(';')[1]
logger.debug(
'Getting info for track %s with id %s' % (track.uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return super(BeetsPlaybackProvider, self).play(track)
<commit_msg>playback: Use translate_uri() instead of change_track()<commit_after>from __future__ import unicode_literals
import logging
from mopidy import backend
import pykka
from .client import BeetsRemoteClient
from .library import BeetsLibraryProvider
logger = logging.getLogger(__name__)
class BeetsBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(BeetsBackend, self).__init__()
beets_endpoint = 'http://%s:%s' % (
config['beets']['hostname'], config['beets']['port'])
self.beets_api = BeetsRemoteClient(beets_endpoint)
self.library = BeetsLibraryProvider(backend=self)
self.playback = BeetsPlaybackProvider(audio=audio, backend=self)
self.playlists = None
self.uri_schemes = ['beets']
class BeetsPlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track_id = uri.split(';')[1]
logger.debug('Getting info for track %s with id %s' % (uri, track_id))
track = self.backend.beets_api.get_track(track_id, True)
return track.uri
|
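The switch follows Mopidy's newer PlaybackProvider contract: translate_uri() receives the Mopidy URI and returns a URI that GStreamer can actually play (or None if it cannot be resolved), and the base class handles the actual track change. A minimal sketch of that contract, with a made-up 'example:' scheme and host:

from mopidy import backend

class ExamplePlaybackProvider(backend.PlaybackProvider):
    def translate_uri(self, uri):
        # Return None to tell Mopidy the track cannot be played.
        if not uri.startswith('example:'):
            return None
        # Map the Mopidy URI onto a stream URL GStreamer understands;
        # the host below is hypothetical.
        return 'http://media.example.com/' + uri.split(':', 1)[1]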
d2444e557e097f375ee830ebf382d68b702b80da
|
src/ansible/forms.py
|
src/ansible/forms.py
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
Set Textarea width and height
|
Set Textarea width and height
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
Set Textarea width and height
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
<commit_before>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
<commit_msg>Set Textarea width and height<commit_after>
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
Set Textarea width and heightfrom django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
<commit_before>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea)
<commit_msg>Set Textarea width and height<commit_after>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookEditForm(forms.Form):
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
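The attrs dict passed to a Django widget is written straight onto the rendered tag, so rows/cols control the textarea's visible size without any custom template. A small sketch (inside a configured Django project) of what the widget emits:

from django import forms

class NoteForm(forms.Form):
    body = forms.CharField(widget=forms.Textarea(attrs={'rows': 30, 'cols': 80}))

# str(NoteForm()['body']) renders roughly:
# <textarea name="body" rows="30" cols="80" required></textarea>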
da1f251195baf20e7dd78a173f84c61e76c91c2a
|
docs/conf.py
|
docs/conf.py
|
import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
|
import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
|
Remove intersphinx extension from documentation.
|
Remove intersphinx extension from documentation.
|
Python
|
bsd-3-clause
|
MAECProject/python-maec
|
import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
Remove intersphinx extension from documentation.
|
import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
|
<commit_before>import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
<commit_msg>Remove intersphinx extension from documentation.<commit_after>
|
import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
|
import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
Remove intersphinx extension from documentation.import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
|
<commit_before>import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
<commit_msg>Remove intersphinx extension from documentation.<commit_after>import os
import maec
project = u'python-maec'
copyright = u'2014, The MITRE Corporation'
version = maec.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'api_vs_bindings/*_snippet.rst',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'python-maec.tex', u'python-maec Documentation',
u'The MITRE Corporation', 'manual'),
]
|
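For reference, sphinx.ext.intersphinx is what let these docs link into other projects' API references (e.g. the Python standard library). If the cross-links were ever wanted back, re-enabling would look like this in conf.py; note the tuple pairs an objects.inv inventory URL with an optional local fallback (None):

extensions.append('sphinx.ext.intersphinx')
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
}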
a4f09620d8939aa8141b39972fb49d82f5380875
|
src/build/console.py
|
src/build/console.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
Add colored time in output
|
Add colored time in output
|
Python
|
mpl-2.0
|
seleznev/firefox-complete-theme-build-system
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
Add colored time in output
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
<commit_msg>Add colored time in output<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
Add colored time in output# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time()*1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds//10000
timestamp = "{:02}:{:02}.{:02}".format(m, s, ms)
if operation:
print("{} {:^15s} {}".format(timestamp, operation, message))
else:
print("{} {}".format(timestamp, message))
<commit_msg>Add colored time in output<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
import datetime
start_time = 0
def start_timer():
global start_time
start_time = int(round(time.time()*1000))
def log(operation=None, message=None, timestamp=True):
current_time = int(round(time.time() * 1000))
d = datetime.timedelta(milliseconds=current_time-start_time)
m = d.seconds // 60
s = d.seconds - (m * 60)
ms = d.microseconds // 10000
timestamp = "{:2}:{:02}.{:02}".format(m, s, ms)
if operation:
print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message))
else:
print("\033[34m{}\033[0m {}".format(timestamp, message))
|
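The \033[34m ... \033[0m pair added above is a plain ANSI SGR escape: 34 selects a blue foreground and 0 resets all attributes, so the rest of the line keeps the terminal's default colors. A tiny self-contained sketch of the same idea:

BLUE = '\033[34m'
RESET = '\033[0m'

def colorize(text, color=BLUE):
    # Wrap text in an ANSI escape sequence; terminals that do not interpret
    # escape codes (or plain log files) will just show the raw bytes.
    return '{}{}{}'.format(color, text, RESET)

print(colorize(' 0:01.23') + '     build      started')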
9ceace60593f133b4f6dfdbd9b6f583362415294
|
src/configuration.py
|
src/configuration.py
|
import ConfigParser
import os
def class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self)
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.'
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
|
import ConfigParser
import os
class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self):
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
|
Fix a few syntax errors
|
Fix a few syntax errors
|
Python
|
agpl-3.0
|
MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats,mmalter/dlstats,Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,MichelJuillard/dlstats
|
import ConfigParser
import os
def class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self)
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.'
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
Fix a few syntax errors
|
import ConfigParser
import os
class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self):
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
|
<commit_before>import ConfigParser
import os
def class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self)
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.'
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
<commit_msg>Fix a few syntax errors<commit_after>
|
import ConfigParser
import os
class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self):
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
|
import ConfigParser
import os
def class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self)
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.'
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
Fix a few syntax errorsimport ConfigParser
import os
class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self):
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
|
<commit_before>import ConfigParser
import os
def class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self)
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.'
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
<commit_msg>Fix a few syntax errors<commit_after>import ConfigParser
import os
class ConfigDlstats(object):
"""Cross platform configuration file handler.
This class manages dlstats configuration files, providing
easy access to the options."""
def __init__(self):
"""Open the configuration files handler, choosing the right
path depending on the platform."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
self.filename = os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
self.filename = '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
self.filename = ("%s/Library/Application Support/%s" %
(os.environ["HOME"], appname))
elif os.name == 'nt':
self.filename = ("%s\Application Data\%s" %
(os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
self.config = ConfigParser.ConfigParser()
self.config.read(self.filename)
|
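The corrected class above is a thin wrapper: pick a per-platform path, then hand it to ConfigParser. A minimal sketch of reading an option the same way (Python 2, to match the record's `import ConfigParser` spelling; the section and option names are hypothetical, not taken from dlstats):

import ConfigParser

parser = ConfigParser.ConfigParser()
# read() silently skips files that do not exist, unlike the class above,
# which probes the paths itself and raises if none is found
parser.read('/etc/dlstats')              # one of the posix paths probed above
host = parser.get('mongodb', 'host')     # hypothetical section/option;
print(host)                              # raises NoSectionError/NoOptionError if absent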
4298e82a3dc4c6577b41b4acbb73ff7bb5795002
|
src/django_registration/backends/one_step/views.py
|
src/django_registration/backends/one_step/views.py
|
"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
Make the one-step backend a little more robust with custom users.
|
Make the one-step backend a little more robust with custom users.
|
Python
|
bsd-3-clause
|
ubernostrum/django-registration
|
"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
Make the one-step backend a little more robust with custom users.
|
"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
<commit_before>"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
<commit_msg>Make the one-step backend a little more robust with custom users.<commit_after>
|
"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
Make the one-step backend a little more robust with custom users."""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
<commit_before>"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
<commit_msg>Make the one-step backend a little more robust with custom users.<commit_after>"""
A one-step (user signs up and is immediately active and logged in)
workflow.
"""
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
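The change above matters because authenticate() forwards its keyword arguments to the auth backends, and Django's ModelBackend matches the credential keyword against the user model's USERNAME_FIELD. With a hard-coded username= keyword, a custom user model whose USERNAME_FIELD is, say, 'email' would never authenticate. A rough illustration (assumes a configured Django project; the email and password values are made up):

from django.contrib.auth import authenticate, get_user_model

User = get_user_model()

credentials = {
    User.USERNAME_FIELD: 'alice@example.com',  # e.g. 'email' on a custom model
    'password': 's3cret',
}
user = authenticate(**credentials)  # backend receives the keyword it expects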
76ed0bb6415209aa28350d4304e7b87715ba37f5
|
qllr/templating.py
|
qllr/templating.py
|
import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
Make templates return relative paths
|
Make templates return relative paths
|
Python
|
agpl-3.0
|
em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/pickup-rating,em92/pickup-rating,em92/quakelive-local-ratings
|
import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
Make templates return relative paths
|
import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
<commit_before>import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
<commit_msg>Make templates return relative paths<commit_after>
|
import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
Make templates return relative pathsimport typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
<commit_before>import typing
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
return request.url_for(name, **path_params)
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
<commit_msg>Make templates to return relative path<commit_after>import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
nickname = str(escape(nickname))
for i in range(8):
nickname = nickname.replace(
"^" + str(i), '</span><span class="qc' + str(i) + '">'
)
return '<span class="qc7">' + nickname + "</span>"
def seconds_to_mmss(value):
seconds = int(escape(value))
m, s = divmod(seconds, 60)
return "%02d:%02d" % (m, s)
class Templates(Jinja2Templates):
def __init__(self, directory: str) -> None:
@contextfunction
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
path_params = {
k: v
for k, v in path_params.items()
if not isinstance(v, Undefined) and v is not None
}
# NOTE: take this stupid hack away, when url_for returns relative path
absolute_url = request.url_for(name, **path_params)
parsed_absolute_url = urlparse(absolute_url)
return ParseResult("", "", *parsed_absolute_url[2:]).geturl()
super().__init__(directory)
self.env.filters["ql_nickname"] = render_ql_nickname
self.env.filters["seconds_to_mmss"] = seconds_to_mmss
self.env.globals["url_for"] = url_for
templates = Templates(directory="templates")
|
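The workaround above needs only the standard library: urlparse splits the absolute URL into six components, and rebuilding a ParseResult with an empty scheme and netloc keeps just the path, params, query and fragment. A standalone demo of the same trick (the URL is an invented example):

from urllib.parse import ParseResult, urlparse

absolute_url = 'http://example.com/player/42?weeks=10'
parsed = urlparse(absolute_url)
# drop scheme ('http') and netloc ('example.com'), keep the rest
relative = ParseResult('', '', *parsed[2:]).geturl()
print(relative)  # -> /player/42?weeks=10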
18d04567570d0b5e9156c720d1648338aba58369
|
readux/__init__.py
|
readux/__init__.py
|
__version_info__ = (1, 5, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
__version_info__ = (1, 6, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
Bump dev version to 1.6 after releasing 1.5
|
Bump dev version to 1.6 after releasing 1.5
|
Python
|
apache-2.0
|
emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
|
__version_info__ = (1, 5, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
Bump dev version to 1.6 after releasing 1.5
|
__version_info__ = (1, 6, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
<commit_before>__version_info__ = (1, 5, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
<commit_msg>Bump dev version to 1.6 after releasing 1.5<commit_after>
|
__version_info__ = (1, 6, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
__version_info__ = (1, 5, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
Bump dev version to 1.6 after releasing 1.5__version_info__ = (1, 6, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
<commit_before>__version_info__ = (1, 5, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
<commit_msg>Bump dev version to 1.6 after releasing 1.5<commit_after>__version_info__ = (1, 6, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
|
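The version-tuple convention used above dot-joins all but the last element and dash-appends the last element when it is not None, so the bump yields '1.6.0-dev' where the release was '1.5.1'. A quick check of that logic:

for version_info in [(1, 5, 1, None), (1, 6, 0, 'dev')]:
    version = '.'.join(str(i) for i in version_info[:-1])
    if version_info[-1] is not None:
        version += '-%s' % (version_info[-1],)
    print(version)  # -> 1.5.1, then 1.6.0-dev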
240d4d33dc6570c957ce568a952a1a282dc50736
|
opps/article/views.py
|
opps/article/views.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'article/{0}/{1}.html'.format(long_slug, self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
|
Fix template name for home page (/) entries on the detail page
|
Fix template name for home page (/) entries on the detail page
|
Python
|
mit
|
williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
Fix template name for home page (/) entries on the detail page
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'article/{0}/{1}.html'.format(long_slug, self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
<commit_msg>Fix template name for home page (/) entries on the detail page<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'article/{0}/{1}.html'.format(long_slug, self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
Fix template name for home page (/) entries on the detail page#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'article/{0}/{1}.html'.format(long_slug, self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
<commit_msg>Fix template name for home page (/) entries on the detail page<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'channel/{0}.html'.format(long_slug)
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return 'article/{0}/{1}.html'.format(long_slug, self.kwargs['slug'])
@property
def queryset(self):
long_slug = self.kwargs.get('channel__long_slug', 'home')
return Post.objects.filter(channel__long_slug=long_slug,
slug=self.kwargs['slug']).all()
|
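The bug being fixed: a detail request reached from the home page carries no `channel__long_slug` kwarg, so the old `self.kwargs['channel__long_slug']` lookup raised KeyError, while the rewritten property falls back to 'home' just as the queryset already did. A toy reproduction (the kwargs dict is a made-up example):

kwargs = {'slug': 'hello-world'}  # home-page detail request: no channel slug
long_slug = kwargs.get('channel__long_slug', 'home')   # behaviour after the fix
print('article/{0}/{1}.html'.format(long_slug, kwargs['slug']))
# -> article/home/hello-world.html
# the old kwargs['channel__long_slug'] lookup would raise KeyError here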
e68b0f10cd2dcbeade127ca3c2a30408595e9ecb
|
ownership/__init__.py
|
ownership/__init__.py
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
Add proxy fix, as in LR this will run behind a reverse proxy
|
Add proxy fix, as in LR this will run behind a reverse proxy
|
Python
|
mit
|
LandRegistry/ownership-alpha,LandRegistry/ownership-alpha,LandRegistry/ownership-alpha
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
Add proxy fix, as in LR this will run behind a reverse proxy
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
<commit_before>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
<commit_msg>Add proxy fix, as in LR this will run behind a reverse proxy<commit_after>
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
Add proxy fix, as in LR this will run behind a reverse proxyfrom flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
<commit_before>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
<commit_msg>Add proxy fix, as in LR this will run behind a reverse proxy<commit_after>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from .health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
def health(self):
try:
with self.engine.connect() as c:
c.execute('select 1=1').fetchall()
return True, 'DB'
except:
return False, 'DB'
SQLAlchemy.health = health
db = SQLAlchemy(app)
Health(app, checks=[db.health])
|
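ProxyFix rewrites the WSGI environ from the X-Forwarded-* headers the reverse proxy sets, so the app sees the external scheme and host when generating URLs. Note that `werkzeug.contrib.fixers` was removed in Werkzeug 1.0; on current Werkzeug the equivalent wiring is sketched below, with one explicitly trusted proxy hop:

from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

app = Flask(__name__)
# trust exactly one proxy for X-Forwarded-For / -Proto / -Host
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1)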
6947a38fd99447809870d82a425abd4db9d884fe
|
test/htmltoreadable.py
|
test/htmltoreadable.py
|
# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.css('.post_show')
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
|
# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.doc.tree.cssselect('.post_show')[0]
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
|
Stop using deprecated function in test
|
Stop using deprecated function in test
|
Python
|
mit
|
shigarus/NewsParser
|
# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.css('.post_show')
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
Stop using deprecated function in test
|
# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.doc.tree.cssselect('.post_show')[0]
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
|
<commit_before># -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.css('.post_show')
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
<commit_msg>Stop using deprecated function in test<commit_after>
|
# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.doc.tree.cssselect('.post_show')[0]
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
|
# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.css('.post_show')
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
Stop using deprecated function in test# -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.doc.tree.cssselect('.post_show')[0]
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
|
<commit_before># -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.css('.post_show')
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
<commit_msg>Delete using deprecated fnc in test<commit_after># -*- coding: utf-8 -*-
import codecs
import os
import grab
from src import htmltoreadable as hr
def test():
g = grab.Grab()
g.go('http://habrahabr.ru/post/266293/')
root_node = g.doc.tree.cssselect('.post_show')[0]
text = hr.html_to_readable(root_node)
path = 'out'
if not os.path.exists(path):
os.mkdir(path)
outpath = os.path.join(path, 'out.log')
with codecs.open(outpath, 'w', encoding='utf-8') as fh:
fh.write(text)
if __name__ == '__main__':
test()
|
2dad35a7fb6f4daa80b7f760889013fd8eb54753
|
examples/drawing/random_geometric_graph.py
|
examples/drawing/random_geometric_graph.py
|
import networkx as nx
import matplotlib.pyplot as plt
G=nx.random_geometric_graph(200,0.125)
pos=G.pos # position is stored as member data for random_geometric_graph
# find node near center (0.5,0.5)
dmin=1
ncenter=0
for n in pos:
x,y=pos[n]
d=(x-0.5)**2+(y-0.5)**2
if d<dmin:
ncenter=n
dmin=d
# color by path length from node near center
p=nx.single_source_shortest_path_length(G,ncenter)
plt.figure(figsize=(8,8))
nx.draw_networkx_edges(G,pos,nodelist=[ncenter],alpha=0.4)
nx.draw_networkx_nodes(G,pos,nodelist=p.keys(),
node_size=80,
node_color=p.values(),
cmap=plt.cm.Reds_r)
plt.xlim(-0.05,1.05)
plt.ylim(-0.05,1.05)
plt.axis('off')
plt.savefig('random_geometric_graph.png')
plt.show()
|
import networkx as nx
import matplotlib.pyplot as plt
G=nx.random_geometric_graph(200,0.125)
# position is stored as node attribute data for random_geometric_graph
pos=nx.get_node_attributes(G,'pos')
# find node near center (0.5,0.5)
dmin=1
ncenter=0
for n in pos:
x,y=pos[n]
d=(x-0.5)**2+(y-0.5)**2
if d<dmin:
ncenter=n
dmin=d
# color by path length from node near center
p=nx.single_source_shortest_path_length(G,ncenter)
plt.figure(figsize=(8,8))
nx.draw_networkx_edges(G,pos,nodelist=[ncenter],alpha=0.4)
nx.draw_networkx_nodes(G,pos,nodelist=p.keys(),
node_size=80,
node_color=p.values(),
cmap=plt.cm.Reds_r)
plt.xlim(-0.05,1.05)
plt.ylim(-0.05,1.05)
plt.axis('off')
plt.savefig('random_geometric_graph.png')
plt.show()
|
Update example for node position in new RGG interface.
|
Update example for node position in new RGG interface.
|
Python
|
bsd-3-clause
|
blublud/networkx,nathania/networkx,jni/networkx,ghdk/networkx,RMKD/networkx,ionanrozenfeld/networkx,kai5263499/networkx,bzero/networkx,dmoliveira/networkx,kernc/networkx,SanketDG/networkx,RMKD/networkx,jni/networkx,debsankha/networkx,jakevdp/networkx,kai5263499/networkx,wasade/networkx,chrisnatali/networkx,yashu-seth/networkx,jakevdp/networkx,ltiao/networkx,jcurbelo/networkx,chrisnatali/networkx,farhaanbukhsh/networkx,sharifulgeo/networkx,OrkoHunter/networkx,dhimmel/networkx,kernc/networkx,aureooms/networkx,nathania/networkx,bzero/networkx,chrisnatali/networkx,sharifulgeo/networkx,jakevdp/networkx,cmtm/networkx,beni55/networkx,debsankha/networkx,Sixshaman/networkx,aureooms/networkx,kai5263499/networkx,dmoliveira/networkx,debsankha/networkx,ghdk/networkx,ghdk/networkx,jni/networkx,harlowja/networkx,nathania/networkx,JamesClough/networkx,dhimmel/networkx,blublud/networkx,goulu/networkx,blublud/networkx,jfinkels/networkx,ionanrozenfeld/networkx,aureooms/networkx,andnovar/networkx,ionanrozenfeld/networkx,harlowja/networkx,dhimmel/networkx,jtorrents/networkx,kernc/networkx,farhaanbukhsh/networkx,RMKD/networkx,NvanAdrichem/networkx,tmilicic/networkx,jtorrents/networkx,michaelpacer/networkx,bzero/networkx,farhaanbukhsh/networkx,dmoliveira/networkx,sharifulgeo/networkx,harlowja/networkx
|
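In the new RGG interface node positions live in node attribute data instead of on the graph object; a minimal sketch of reading them back (graph size and radius match the example, the node index is illustrative):
import networkx as nx

G = nx.random_geometric_graph(200, 0.125)
# Positions are stored per node under the 'pos' attribute key and come
# back as a dict mapping node -> (x, y).
pos = nx.get_node_attributes(G, 'pos')
x, y = pos[0]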
3f025b5400c0855472a772487de8930bac9b5eef
|
numpy/setupscons.py
|
numpy/setupscons.py
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy',parent_package,top_path, setup_name = 'setupscons.py')
config.add_subpackage('distutils')
config.add_subpackage('testing')
config.add_subpackage('f2py')
config.add_subpackage('core')
config.add_subpackage('lib')
config.add_subpackage('oldnumeric')
config.add_subpackage('numarray')
config.add_subpackage('fft')
config.add_subpackage('linalg')
config.add_subpackage('random')
config.add_subpackage('ma')
config.add_data_dir('doc')
config.add_data_dir('tests')
config.scons_make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print 'This is the wrong setup.py file to run'
|
#!/usr/bin/env python
from os.path import join as pjoin
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
from numpy.distutils.misc_util import scons_generate_config_py
pkgname = 'numpy'
config = Configuration(pkgname,parent_package,top_path, setup_name = 'setupscons.py')
config.add_subpackage('distutils')
config.add_subpackage('testing')
config.add_subpackage('f2py')
config.add_subpackage('core')
config.add_subpackage('lib')
config.add_subpackage('oldnumeric')
config.add_subpackage('numarray')
config.add_subpackage('fft')
config.add_subpackage('linalg')
config.add_subpackage('random')
config.add_subpackage('ma')
config.add_data_dir('doc')
config.add_data_dir('tests')
def add_config(*args, **kw):
# Generate __config__, handle inplace issues.
if kw['scons_cmd'].inplace:
target = pjoin(kw['pkg_name'], '__config__.py')
else:
target = pjoin(kw['scons_cmd'].build_lib, kw['pkg_name'], '__config__.py')
scons_generate_config_py(target)
config.add_sconscript(None, post_hook = add_config)
return config
if __name__ == '__main__':
print 'This is the wrong setup.py file to run'
|
Handle inplace generation of __config__.
|
Handle inplace generation of __config__.
|
Python
|
bsd-3-clause
|
rhythmsosad/numpy,numpy/numpy,grlee77/numpy,ViralLeadership/numpy,mattip/numpy,chiffa/numpy,numpy/numpy-refactor,numpy/numpy-refactor,bmorris3/numpy,joferkington/numpy,ekalosak/numpy,mindw/numpy,madphysicist/numpy,matthew-brett/numpy,BMJHayward/numpy,stefanv/numpy,WillieMaddox/numpy,solarjoe/numpy,rajathkumarmp/numpy,musically-ut/numpy,ChristopherHogan/numpy,hainm/numpy,mathdd/numpy,ajdawson/numpy,tynn/numpy,CMartelLML/numpy,seberg/numpy,mingwpy/numpy,yiakwy/numpy,ChristopherHogan/numpy,naritta/numpy,nbeaver/numpy,andsor/numpy,bertrand-l/numpy,mhvk/numpy,dwf/numpy,dwf/numpy,mhvk/numpy,groutr/numpy,tynn/numpy,ssanderson/numpy,rajathkumarmp/numpy,pelson/numpy,rherault-insa/numpy,njase/numpy,pdebuyl/numpy,pelson/numpy,utke1/numpy,gmcastil/numpy,stuarteberg/numpy,tdsmith/numpy,trankmichael/numpy,ESSS/numpy,mwiebe/numpy,Eric89GXL/numpy,pyparallel/numpy,bringingheavendown/numpy,hainm/numpy,nbeaver/numpy,ogrisel/numpy,rgommers/numpy,cjermain/numpy,has2k1/numpy,charris/numpy,skymanaditya1/numpy,gfyoung/numpy,GaZ3ll3/numpy,Anwesh43/numpy,SunghanKim/numpy,endolith/numpy,ewmoore/numpy,dch312/numpy,has2k1/numpy,ddasilva/numpy,WarrenWeckesser/numpy,rajathkumarmp/numpy,felipebetancur/numpy,b-carter/numpy,hainm/numpy,kirillzhuravlev/numpy,rhythmsosad/numpy,jakirkham/numpy,rgommers/numpy,charris/numpy,anntzer/numpy,sonnyhu/numpy,CMartelLML/numpy,jonathanunderwood/numpy,pbrod/numpy,Linkid/numpy,brandon-rhodes/numpy,ahaldane/numpy,groutr/numpy,jorisvandenbossche/numpy,mingwpy/numpy,cjermain/numpy,ddasilva/numpy,rgommers/numpy,andsor/numpy,jakirkham/numpy,leifdenby/numpy,Linkid/numpy,nguyentu1602/numpy,Anwesh43/numpy,stefanv/numpy,charris/numpy,WarrenWeckesser/numpy,numpy/numpy,gfyoung/numpy,jankoslavic/numpy,AustereCuriosity/numpy,dwf/numpy,seberg/numpy,has2k1/numpy,SiccarPoint/numpy,naritta/numpy,maniteja123/numpy,BabeNovelty/numpy,simongibbons/numpy,argriffing/numpy,nguyentu1602/numpy,bringingheavendown/numpy,astrofrog/numpy,nbeaver/numpy,larsmans/numpy,b-carter/numpy,Dapid/numpy,anntzer/numpy,pelson/numpy,mattip/numpy,dwillmer/numpy,githubmlai/numpy,jankoslavic/numpy,tdsmith/numpy,hainm/numpy,drasmuss/numpy,kiwifb/numpy,CMartelLML/numpy,grlee77/numpy,utke1/numpy,empeeu/numpy,sinhrks/numpy,SiccarPoint/numpy,ewmoore/numpy,ddasilva/numpy,ESSS/numpy,cowlicks/numpy,NextThought/pypy-numpy,larsmans/numpy,rudimeier/numpy,sinhrks/numpy,dwillmer/numpy,Dapid/numpy,ChanderG/numpy,MSeifert04/numpy,stefanv/numpy,bringingheavendown/numpy,MaPePeR/numpy,GaZ3ll3/numpy,Anwesh43/numpy,dch312/numpy,rhythmsosad/numpy,KaelChen/numpy,embray/numpy,chatcannon/numpy,dwf/numpy,MSeifert04/numpy,brandon-rhodes/numpy,rajathkumarmp/numpy,stuarteberg/numpy,seberg/numpy,mortada/numpy,sonnyhu/numpy,rherault-insa/numpy,pbrod/numpy,empeeu/numpy,anntzer/numpy,joferkington/numpy,cjermain/numpy,ekalosak/numpy,shoyer/numpy,dwf/numpy,brandon-rhodes/numpy,cowlicks/numpy,ssanderson/numpy,NextThought/pypy-numpy,yiakwy/numpy,musically-ut/numpy,anntzer/numpy,tdsmith/numpy,musically-ut/numpy,rherault-insa/numpy,pbrod/numpy,SunghanKim/numpy,BabeNovelty/numpy,numpy/numpy-refactor,simongibbons/numpy,brandon-rhodes/numpy,nguyentu1602/numpy,simongibbons/numpy,MichaelAquilina/numpy,jorisvandenbossche/numpy,endolith/numpy,Eric89GXL/numpy,astrofrog/numpy,mhvk/numpy,behzadnouri/numpy,pdebuyl/numpy,trankmichael/numpy,jakirkham/numpy,shoyer/numpy,AustereCuriosity/numpy,GrimDerp/numpy,njase/numpy,argriffing/numpy,ahaldane/numpy,KaelChen/numpy,pyparallel/numpy,moreati/numpy,grlee77/numpy,gfyoung/numpy,cowlicks/numpy,jschueller/numpy,mortada/numpy,chiffa/numpy,Micha
elAquilina/numpy,bertrand-l/numpy,seberg/numpy,AustereCuriosity/numpy,felipebetancur/numpy,leifdenby/numpy,kirillzhuravlev/numpy,pizzathief/numpy,grlee77/numpy,jorisvandenbossche/numpy,bmorris3/numpy,Anwesh43/numpy,ChristopherHogan/numpy,MSeifert04/numpy,pbrod/numpy,rmcgibbo/numpy,rudimeier/numpy,jschueller/numpy,chatcannon/numpy,GrimDerp/numpy,embray/numpy,mingwpy/numpy,Eric89GXL/numpy,numpy/numpy,mattip/numpy,skwbc/numpy,GaZ3ll3/numpy,sigma-random/numpy,mwiebe/numpy,leifdenby/numpy,madphysicist/numpy,MSeifert04/numpy,WarrenWeckesser/numpy,GrimDerp/numpy,ajdawson/numpy,dimasad/numpy,jakirkham/numpy,Linkid/numpy,bertrand-l/numpy,trankmichael/numpy,matthew-brett/numpy,solarjoe/numpy,pdebuyl/numpy,dimasad/numpy,ahaldane/numpy,BMJHayward/numpy,numpy/numpy-refactor,joferkington/numpy,jonathanunderwood/numpy,behzadnouri/numpy,drasmuss/numpy,tacaswell/numpy,WillieMaddox/numpy,BabeNovelty/numpy,numpy/numpy-refactor,mingwpy/numpy,dato-code/numpy,Dapid/numpy,ahaldane/numpy,MichaelAquilina/numpy,empeeu/numpy,mwiebe/numpy,has2k1/numpy,githubmlai/numpy,jakirkham/numpy,rudimeier/numpy,groutr/numpy,matthew-brett/numpy,MaPePeR/numpy,b-carter/numpy,immerrr/numpy,ContinuumIO/numpy,BabeNovelty/numpy,astrofrog/numpy,ewmoore/numpy,ChanderG/numpy,sigma-random/numpy,SunghanKim/numpy,rgommers/numpy,githubmlai/numpy,felipebetancur/numpy,tdsmith/numpy,dato-code/numpy,Srisai85/numpy,moreati/numpy,njase/numpy,madphysicist/numpy,ewmoore/numpy,GaZ3ll3/numpy,dimasad/numpy,andsor/numpy,joferkington/numpy,GrimDerp/numpy,pelson/numpy,ogrisel/numpy,andsor/numpy,pizzathief/numpy,skwbc/numpy,empeeu/numpy,abalkin/numpy,madphysicist/numpy,tynn/numpy,ajdawson/numpy,ChanderG/numpy,Yusa95/numpy,behzadnouri/numpy,SiccarPoint/numpy,tacaswell/numpy,BMJHayward/numpy,skymanaditya1/numpy,ekalosak/numpy,skymanaditya1/numpy,rmcgibbo/numpy,yiakwy/numpy,mattip/numpy,Yusa95/numpy,chiffa/numpy,Linkid/numpy,pelson/numpy,Srisai85/numpy,argriffing/numpy,mathdd/numpy,dch312/numpy,embray/numpy,mathdd/numpy,simongibbons/numpy,larsmans/numpy,rudimeier/numpy,mhvk/numpy,jschueller/numpy,ContinuumIO/numpy,trankmichael/numpy,sinhrks/numpy,ogrisel/numpy,NextThought/pypy-numpy,moreati/numpy,simongibbons/numpy,immerrr/numpy,KaelChen/numpy,yiakwy/numpy,Srisai85/numpy,stefanv/numpy,kiwifb/numpy,sigma-random/numpy,gmcastil/numpy,ChristopherHogan/numpy,naritta/numpy,madphysicist/numpy,abalkin/numpy,chatcannon/numpy,ESSS/numpy,ChanderG/numpy,endolith/numpy,SunghanKim/numpy,immerrr/numpy,grlee77/numpy,pizzathief/numpy,matthew-brett/numpy,SiccarPoint/numpy,BMJHayward/numpy,MaPePeR/numpy,felipebetancur/numpy,ViralLeadership/numpy,jorisvandenbossche/numpy,dato-code/numpy,pyparallel/numpy,bmorris3/numpy,mindw/numpy,ogrisel/numpy,solarjoe/numpy,sinhrks/numpy,sonnyhu/numpy,nguyentu1602/numpy,WillieMaddox/numpy,jankoslavic/numpy,ssanderson/numpy,sigma-random/numpy,mindw/numpy,pizzathief/numpy,shoyer/numpy,ContinuumIO/numpy,Eric89GXL/numpy,embray/numpy,charris/numpy,kiwifb/numpy,kirillzhuravlev/numpy,ewmoore/numpy,astrofrog/numpy,dimasad/numpy,gmcastil/numpy,cjermain/numpy,cowlicks/numpy,maniteja123/numpy,dwillmer/numpy,CMartelLML/numpy,ahaldane/numpy,Srisai85/numpy,mindw/numpy,WarrenWeckesser/numpy,stuarteberg/numpy,pizzathief/numpy,jankoslavic/numpy,jschueller/numpy,dwillmer/numpy,maniteja123/numpy,naritta/numpy,shoyer/numpy,abalkin/numpy,WarrenWeckesser/numpy,jonathanunderwood/numpy,embray/numpy,NextThought/pypy-numpy,endolith/numpy,Yusa95/numpy,stuarteberg/numpy,KaelChen/numpy,Yusa95/numpy,MichaelAquilina/numpy,rmcgibbo/numpy,numpy/numpy,MSeifert04/numpy,githubmlai/
numpy,immerrr/numpy,utke1/numpy,astrofrog/numpy,ajdawson/numpy,larsmans/numpy,skymanaditya1/numpy,stefanv/numpy,sonnyhu/numpy,rmcgibbo/numpy,tacaswell/numpy,jorisvandenbossche/numpy,MaPePeR/numpy,shoyer/numpy,mhvk/numpy,mathdd/numpy,bmorris3/numpy,dato-code/numpy,ViralLeadership/numpy,rhythmsosad/numpy,musically-ut/numpy,dch312/numpy,pdebuyl/numpy,drasmuss/numpy,mortada/numpy,ogrisel/numpy,pbrod/numpy,kirillzhuravlev/numpy,matthew-brett/numpy,mortada/numpy,ekalosak/numpy,skwbc/numpy
|
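The post_hook's only branch is the output path: in-place builds must write __config__.py into the source package, regular builds under build_lib. A standalone sketch of that decision (function and argument names are illustrative):
from os.path import join as pjoin

def config_target(pkg_name, build_lib, inplace):
    # In-place builds generate __config__.py inside the source tree;
    # normal builds put it under the build directory instead.
    if inplace:
        return pjoin(pkg_name, '__config__.py')
    return pjoin(build_lib, pkg_name, '__config__.py')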
e84d8834359d90f291035008ed91af19be869bfa
|
sourcestats/settings.py
|
sourcestats/settings.py
|
# Source Server Stats
# File: sourcestats/settings.py
# Desc: settings for the Flask server
DEBUG = True
# Number of servers to collect from in parallel
PARALLEL = 4000
# Loop intervals (+time to execute!)
COLLECT_INTERVAL = 30
FIND_INTERVAL = 300
# Timeout for reading addresses via UDP from Valve
MASTER_TIMEOUT = 30
# Timeout for reading status from gameservers
SERVER_TIMEOUT = 30
# Number of times a server fails before blacklisting
FAIL_COUNT = 5
# Batch size for indexing documents in ES
ES_BATCH = 1000
# Default number of terms to aggregate in ES (/players)
ES_TERMS = 1000
ES_INDEX = 'sourcestats'
ES_HOSTS = ['localhost:9200']
#VALVE_HOSTS = ['hl2master.steampowered.com']
VALVE_HOSTS = [
'208.64.200.52',
'208.64.200.65',
'208.64.200.39'
]
# See: https://python-valve.readthedocs.org/en/latest/master_server.html#valve.source.master_server.MasterServerQuerier.find
VALVE_REGIONS = [
u'na',
u'sa',
u'eu',
u'as',
u'oc',
u'af',
u'rest'
]
|
# Source Server Stats
# File: sourcestats/settings.py
# Desc: settings for the Flask server
DEBUG = True
# Number of servers to collect from in parallel
PARALLEL = 1000
# Loop intervals (+time to execute!)
COLLECT_INTERVAL = 30
FIND_INTERVAL = 300
# Timeout for reading addresses via UDP from Valve
MASTER_TIMEOUT = 30
# Timeout for reading status from gameservers
SERVER_TIMEOUT = 30
# Number of times a server fails before blacklisting
FAIL_COUNT = 5
# Batch size for indexing documents in ES
ES_BATCH = 1000
# Default number of terms to aggregate in ES (/players)
ES_TERMS = 1000
ES_INDEX = 'sourcestats'
ES_HOSTS = ['localhost:9200']
#VALVE_HOSTS = ['hl2master.steampowered.com']
VALVE_HOSTS = [
'208.64.200.52',
'208.64.200.65',
'208.64.200.39'
]
# See: https://python-valve.readthedocs.org/en/latest/master_server.html#valve.source.master_server.MasterServerQuerier.find
VALVE_REGIONS = [
u'na',
u'sa',
u'eu',
u'as',
u'oc',
u'af',
u'rest'
]
|
Set PARALLEL to 1000 because supervisor's default ulimit makes higher concurrency fail
|
Set PARALLEL to 1000 because supervisor's default ulimit makes higher concurrency fail
|
Python
|
mit
|
Fizzadar/SourceServerStats,Fizzadar/SourceServerStats,Fizzadar/SourceServerStats
|
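Each in-flight server query holds an open UDP socket, so PARALLEL is effectively capped by the process's file-descriptor limit, which supervisor does not raise by default. A quick, illustrative way to inspect that limit from Python:
import resource

# Soft/hard caps on open file descriptors for this process; a burst of
# 4000 concurrent sockets would run into the soft value.
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
print('open files: soft=%d hard=%d' % (soft, hard))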
1c1d2b1dfba2fbf02a642da516119a1e280a4bc3
|
account_invoice_merge/__openerp__.py
|
account_invoice_merge/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2010-2011 Elico Corp. All Rights Reserved.
# Author: Ian Li <ian.li@elico-corp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Invoice Merge Wizard',
'version': '1.1.1',
'category': 'Finance',
'author': 'Elico Corp',
'website': 'http://www.openerp.net.cn',
'license': 'AGPL-3',
'depends': ['account'],
'data': [
'wizard/invoice_merge_view.xml',
],
'test': [
],
'demo': [],
'installable': True,
'active': False,
'certificate': False,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2010-2011 Elico Corp. All Rights Reserved.
# Author: Ian Li <ian.li@elico-corp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Invoice Merge Wizard',
'version': '1.1.1',
'category': 'Finance',
'author': "Elico Corp,Odoo Community Association (OCA)",
'website': 'http://www.openerp.net.cn',
'license': 'AGPL-3',
'depends': ['account'],
'data': [
'wizard/invoice_merge_view.xml',
],
'test': [
],
'demo': [],
'installable': True,
'active': False,
'certificate': False,
}
|
Add OCA as author of OCA addons
|
Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.
|
Python
|
agpl-3.0
|
OCA/account-invoicing,OCA/account-invoicing
|
66cc5b8ecae568c3a20948718ef2d4f162cfd786
|
test/test_pycompile.py
|
test/test_pycompile.py
|
"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
|
"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
|
Add test that compile() raises a CompilationError
|
Add test that compile() raises a CompilationError
|
Python
|
mit
|
lesscpy/lesscpy,fivethreeo/lesscpy,joequery/lesscpy
|
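unittest also accepts assertRaises as a context manager, which avoids the wrapper lambda; an equivalent form of the new test, assuming the same lesscpy API:
import unittest
from six import StringIO
from lesscpy import compile
from lesscpy.exceptions import CompilationError

class TestCompileFunction(unittest.TestCase):
    def test_raises_exception(self):
        # The with-block must raise CompilationError for the test to pass.
        with self.assertRaises(CompilationError):
            compile(StringIO("a }"), minify=True)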
"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
Add test if compile() raises an CompilationError
|
"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
|
<commit_before>"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
<commit_msg>Add test if compile() raises an CompilationError<commit_after>
|
"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
|
"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
Add test if compile() raises an CompilationError"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
|
<commit_before>"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
<commit_msg>Add test that compile() raises a CompilationError<commit_after>"""
Test the high-level compile function
"""
import unittest
from six import StringIO
from lesscpy import compile
class TestCompileFunction(unittest.TestCase):
"""
Unit tests for compile
"""
def test_compile(self):
"""
It can compile input from a file-like object
"""
output = compile(StringIO("a { border-width: 2px * 3; }"), minify=True)
self.assertEqual(output, "a{border-width:6px;}");
def test_raises_exception(self):
"""
Test if a syntax error raises an exception
"""
from lesscpy.exceptions import CompilationError
fail_func = lambda: compile(StringIO("a }"), minify=True)
self.assertRaises(CompilationError, fail_func)
|
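The test above funnels the call through a throwaway lambda so assertRaises can invoke it. The context-manager form of assertRaises expresses the same check more directly; a minimal sketch, assuming lesscpy and six are installed as in the record above:
import unittest
from six import StringIO
from lesscpy import compile
from lesscpy.exceptions import CompilationError
class TestCompileRaises(unittest.TestCase):
    def test_raises_exception(self):
        # the context manager replaces fail_func = lambda: ...
        with self.assertRaises(CompilationError):
            compile(StringIO("a }"), minify=True)
if __name__ == '__main__':
    unittest.main()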
3febcda544f372af01e9d2138c131f103ed45455
|
app/soc/mapreduce/delete_gci_data.py
|
app/soc/mapreduce/delete_gci_data.py
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to delete GCI data for safe-harboring.
"""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to insert dummy data for GCI student data for safe-harboring."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
|
Update the docstring for the mapper to reflect what it does correctly.
|
Update the docstring for the mapper to reflect what it does correctly.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to delete GCI data for safe-harboring.
"""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
Update the docstring for the mapper to reflect what it does correctly.
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to insert dummy data for GCI student data for safe-harboring."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
|
<commit_before># Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to delete GCI data for safe-harboring.
"""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
<commit_msg>Update the docstring for the mapper to reflect what it does correctly.<commit_after>
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to insert dummy data for GCI student data for safe-harboring."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to delete GCI data for safe-harboring.
"""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
Update the docstring for the mapper to reflect what it does correctly.# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to insert dummy data for GCI student data for safe-harboring."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
|
<commit_before># Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to delete GCI data for safe-harboring.
"""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
<commit_msg>Update the docstring for the mapper to reflect what it does correctly.<commit_after># Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapreduce to insert dummy data for GCI student data for safe-harboring."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from mapreduce import context
from mapreduce import operation
from soc.modules.gci.logic import profile as profile_logic
def process(student_info):
ctx = context.get()
params = ctx.mapreduce_spec.mapper.params
program_key_str = params['program_key']
program_key = db.Key.from_path('GCIProgram', program_key_str)
# We can skip the student info entity not belonging to the given program.
if student_info.program.key() != program_key:
return
entities, blobs = profile_logic.insertDummyData(student_info)
blobstore.delete(blobs)
for entity in entities:
yield operation.db.Put(entity)
yield operation.counters.Increment("profile dummy data inserted")
|
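A note on the mapreduce idiom in this record: handlers never write to the datastore directly; they yield operation objects that the framework batches and applies. A stripped-down sketch of that pattern, with illustrative entity logic rather than Melange's insertDummyData:
from mapreduce import operation
def process(entity):
    # mutate the entity in memory, then hand the write back to the
    # framework as a yielded Put so it can be batched
    entity.processed = True
    yield operation.db.Put(entity)
    yield operation.counters.Increment("entities processed")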
873fd7db56eadfb0aa4b135c01d0a16f8f240c8a
|
v2/setup.py
|
v2/setup.py
|
#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
|
#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six >= 1.4.0'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
|
Add a requirement on python-six 1.4.0 (for add_metaclass)
|
Add a requirement on python-six 1.4.0 (for add_metaclass)
This also means that this doesn't run on RHEL 7 as of today.
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
Add a requirement on python-six 1.4.0 (for add_metaclass)
This also means that this doesn't run on RHEL 7 as of today.
|
#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six >= 1.4.0'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
|
<commit_before>#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
<commit_msg>Add a requirement on python-six 1.4.0 (for add_metaclass)
This also means that this doesn't run on RHEL 7 as of today.<commit_after>
|
#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six >= 1.4.0'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
|
#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
Add a requirement on python-six 1.4.0 (for add_metaclass)
This also means that this doesn't run on RHEL 7 as of today.#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six >= 1.4.0'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
|
<commit_before>#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
<commit_msg>Add a requirement on python-six 1.4.0 (for add_metaclass)
This also means that this doesn't run on RHEL 7 as of today.<commit_after>#!/usr/bin/env python
import sys
from ansible import __version__
try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)
setup(name='ansible',
version=__version__,
description='Radically simple IT automation',
author='Michael DeHaan',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six >= 1.4.0'],
# package_dir={ '': 'lib' },
# packages=find_packages('lib'),
package_data={
'': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
},
scripts=[
'bin/ansible',
'bin/ansible-playbook',
# 'bin/ansible-pull',
# 'bin/ansible-doc',
# 'bin/ansible-galaxy',
# 'bin/ansible-vault',
],
data_files=[],
)
|
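The six >= 1.4.0 floor is there for six.add_metaclass, which first shipped in that release. A minimal sketch of the decorator the v2 code depends on; the class below is illustrative and not taken from the ansible tree:
from abc import ABCMeta, abstractmethod
import six
@six.add_metaclass(ABCMeta)
class Plugin(object):
    # add_metaclass applies ABCMeta in a form that parses on both
    # Python 2 and Python 3, unlike the __metaclass__ attribute or
    # the metaclass= keyword, which are each version-specific
    @abstractmethod
    def run(self):
        pass
# instantiating the abstract base fails as expected:
# Plugin() raises TypeError until run() is implemented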
d98a2f8944c9b1ba6ef587b496987316c33488e5
|
sample-settings.py
|
sample-settings.py
|
SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
|
SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
LOG_FILENAME = 'dump'
|
Add log filename to sample settings for tests.
|
Add log filename to sample settings for tests.
|
Python
|
mit
|
punchagan/statiki,punchagan/statiki
|
SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
Add log filename to sample settings for tests.
|
SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
LOG_FILENAME = 'dump'
|
<commit_before>SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
<commit_msg>Add log filename to sample settings for tests.<commit_after>
|
SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
LOG_FILENAME = 'dump'
|
SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
Add log filename to sample settings for tests.SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
LOG_FILENAME = 'dump'
|
<commit_before>SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
<commit_msg>Add log filename to sample settings for tests.<commit_after>SECRET_KEY = 'Set the secret_key to something unique and secret.'
CLIENT_ID = 'xxxxxxx'
CLIENT_SECRET = 'yyyyyyyyyyyyyyyyyy'
LOG_FILENAME = 'dump'
|
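For context, a sketch of how a settings module like this is typically consumed. The Flask wiring below is an assumption for illustration, not code from the statiki repo:
import logging
from flask import Flask
app = Flask(__name__)
app.config.from_object('settings')  # picks up SECRET_KEY, CLIENT_ID, ...
# the LOG_FILENAME added above gives tests a predictable file to inspect
logging.basicConfig(filename=app.config['LOG_FILENAME'],
                    level=logging.DEBUG)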
27035d6abba16fb06c8fa548385b33ab08bf787a
|
test/proper_noun_test.py
|
test/proper_noun_test.py
|
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentance():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentance():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_munroe_with_proper_nouns():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
|
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentence():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentence():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_proper_noun_middle_sentence():
assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"])
def test_proper_noun_missing():
assert tag_proper_nouns("Today is cloudy at CW17.") == set()
def test_two_proper_nouns():
assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis",
"Hannon"])
def test_munroe_with_proper_noun():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
def test_munroe_with_proper_noun_and_complex_words():
result = munroe_score("Eilis and Jonathan at a workshop")
assert result['score'] == 0.5
|
Add some more proper noun tests
|
Add some more proper noun tests
Fixes #35.
|
Python
|
mit
|
ejh243/MunroeJargonProfiler,ejh243/MunroeJargonProfiler
|
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentance():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentance():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_munroe_with_proper_nouns():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
Add some more proper noun tests
Fixes #35.
|
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentence():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentence():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_proper_noun_middle_sentence():
assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"])
def test_proper_noun_missing():
assert tag_proper_nouns("Today is cloudy at CW17.") == set()
def test_two_proper_nouns():
assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis",
"Hannon"])
def test_munroe_with_proper_noun():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
def test_munroe_with_proper_noun_and_complex_words():
result = munroe_score("Eilis and Jonathan at a workshop")
assert result['score'] == 0.5
|
<commit_before>
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentance():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentance():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_munroe_with_proper_nouns():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
<commit_msg>Add some more proper noun tests
Fixes #35.<commit_after>
|
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentence():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentence():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_proper_noun_middle_sentence():
assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"])
def test_proper_noun_missing():
assert tag_proper_nouns("Today is cloudy at CW17.") == set()
def test_two_proper_nouns():
assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis",
"Hannon"])
def test_munroe_with_proper_noun():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
def test_munroe_with_proper_noun_and_complex_words():
result = munroe_score("Eilis and Jonathan at a workshop")
assert result['score'] == 0.5
|
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentance():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentance():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_munroe_with_proper_nouns():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
Add some more proper noun tests
Fixes #35.
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentence():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentence():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_proper_noun_middle_sentence():
assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"])
def test_proper_noun_missing():
assert tag_proper_nouns("Today is cloudy at CW17.") == set()
def test_two_proper_nouns():
assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis",
"Hannon"])
def test_munroe_with_proper_noun():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
def test_munroe_with_proper_noun_and_complex_words():
result = munroe_score("Eilis and Jonathan at a workshop")
assert result['score'] == 0.5
|
<commit_before>
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentance():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentance():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_munroe_with_proper_nouns():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
<commit_msg>Add some more proper noun tests
Fixes #35.<commit_after>
from jargonprofiler.util import tag_proper_nouns
from jargonprofiler.munroe import munroe_score
def test_proper_noun_in_sentence():
assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"])
def test_proper_noun_begins_sentence():
assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"])
def test_proper_noun_middle_sentence():
assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"])
def test_proper_noun_missing():
assert tag_proper_nouns("Today is cloudy at CW17.") == set()
def test_two_proper_nouns():
assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis",
"Hannon"])
def test_munroe_with_proper_noun():
result = munroe_score("Eilis is a small girl")
assert result["score"] == 1.0
def test_munroe_with_proper_noun_and_complex_words():
result = munroe_score("Eilis and Jonathan at a workshop")
assert result['score'] == 0.5
|
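The tests pin down tag_proper_nouns behaviour without showing an implementation. A rough sketch of one way to satisfy them with NLTK's POS tagger; this is illustrative only, not the jargonprofiler code, and it assumes the punkt and averaged_perceptron_tagger data have been downloaded:
import nltk
def tag_proper_nouns(text):
    # keep tokens the tagger marks as proper nouns; isalpha() drops
    # mixed tokens like CW17 so the 'missing' test case can pass
    tagged = nltk.pos_tag(nltk.word_tokenize(text))
    return set(word for word, tag in tagged
               if tag in ('NNP', 'NNPS') and word.isalpha())
print(tag_proper_nouns("Eilis Hannon is a girl."))  # expected {'Eilis', 'Hannon'}, subject to tagger accuracy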
19ee722aca0fd68d798776d763175aa45e53df48
|
whylog/client/searchers.py
|
whylog/client/searchers.py
|
from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer = fh.read(min(remaining_size, buf_size))
lines = buffer.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
|
from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer_ = fh.read(min(remaining_size, buf_size))
lines = buffer_.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer_[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
|
Change variable name 'buffer' -> 'buffer_' to avoid conflict with the Python built-in
|
Change variable name 'buffer' -> 'buffer_' to avoid conflict with the Python built-in
|
Python
|
bsd-3-clause
|
kgromadzki/whylog,epawlowska/whylog,andrzejgorski/whylog,epawlowska/whylog,konefalg/whylog,kgromadzki/whylog,9livesdata/whylog,andrzejgorski/whylog,konefalg/whylog,9livesdata/whylog
|
from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer = fh.read(min(remaining_size, buf_size))
lines = buffer.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
Change variable name 'buffer' -> 'buffer_' to avoid conflict with the Python built-in
|
from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer_ = fh.read(min(remaining_size, buf_size))
lines = buffer_.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer_[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
|
<commit_before>from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer = fh.read(min(remaining_size, buf_size))
lines = buffer.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
<commit_msg>Change variable name 'buffer' -> 'buffer_' to avoid conflict with the Python built-in<commit_after>
|
from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer_ = fh.read(min(remaining_size, buf_size))
lines = buffer_.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer_[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
|
from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer = fh.read(min(remaining_size, buf_size))
lines = buffer.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
Change variable name 'buffer' -> 'buffer_' to avoid conflict with the Python built-infrom abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer_ = fh.read(min(remaining_size, buf_size))
lines = buffer_.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer_[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
|
<commit_before>from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer = fh.read(min(remaining_size, buf_size))
lines = buffer.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
<commit_msg>Change variable name 'buffer' -> 'buffer_' to avoid conflict with the Python built-in<commit_after>from abc import ABCMeta, abstractmethod
class Searcher(object):
__metaclass__ = ABCMeta
class IndexSearcher(Searcher):
pass
class DataBaseSearcher(Searcher):
pass
class BacktrackSearcher(Searcher):
def __init__(self, file_path):
self._file_path = file_path
def _reverse_from_offset(self, offset, buf_size=8192):
"""
a generator that returns the lines of a file in reverse order
beginning with the specified offset
"""
with open(self._file_path) as fh:
fh.seek(offset)
total_size = remaining_size = fh.tell()
reverse_offset = 0
truncated = None
while remaining_size > 0:
reverse_offset = min(total_size, reverse_offset + buf_size)
fh.seek(total_size-reverse_offset, 0)
buffer_ = fh.read(min(remaining_size, buf_size))
lines = buffer_.split('\n')
remaining_size -= buf_size
if truncated is not None:
if buffer_[-1] is not '\n':
lines[-1] += truncated
else:
yield truncated
truncated = lines[0]
for index in xrange(len(lines) - 1, 0, -1):
if len(lines[index]):
yield lines[index]
yield truncated
|
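Two asides on this diff. In Python 2, buffer is a built-in function rather than a keyword, so shadowing it is legal but hides the built-in; the trailing underscore is the PEP 8 convention for avoiding such clashes. Separately, buffer_[-1] is not '\n' compares identity rather than equality and only works by accident of string interning. A short demonstration (not a change made in this commit):
# shadowing: legal, but the built-in becomes unreachable in this scope
buffer = "no longer the built-in"
buffer_ = "PEP 8 spelling that leaves the built-in alone"
last = "line\n"[-1]
print(last == '\n')   # True - value comparison, always well defined
print(last is '\n')   # interning-dependent; may be True or False, so
                      # == / != is the safe spelling for the check above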
8e362baea40a6b11140a93c13fc60c4c0d1ba577
|
scuole/core/utils.py
|
scuole/core/utils.py
|
# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
|
# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
Usage:
from core.utils import string_replace
KEY_DICT = {
'Isd': 'ISD',
}
s = string_replace('Beaumont Isd', KEY_DICT)
print s # 'Beaumont ISD'
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
|
Add usage section to docstring for string_replace
|
Add usage section to docstring for string_replace
|
Python
|
mit
|
texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole
|
# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
Add usage section to docstring for string_replace
|
# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
Usage:
from core.utils import string_replace
KEY_DICT = {
'Isd': 'ISD',
}
s = string_replace('Beaumont Isd', KEY_DICT)
print s # 'Beaumont ISD'
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
|
<commit_before># -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
<commit_msg>Add usage section to docstring for string_replace<commit_after>
|
# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
Usage:
from core.utils import string_replace
KEY_DICT = {
'Isd': 'ISD',
}
s = string_replace('Beaumont Isd', KEY_DICT)
print s # 'Beaumont ISD'
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
|
# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
Add usage section to docstring for string_replace# -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
Usage:
from core.utils import string_replace
KEY_DICT = {
'Isd': 'ISD',
}
s = string_replace('Beaumont Isd', KEY_DICT)
print s # 'Beaumont ISD'
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
|
<commit_before># -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
<commit_msg>Add usage section to docstring for string_replace<commit_after># -*- coding: utf-8 -*-
import re
def string_replace(text, key_dict):
"""
A function to convert text in a string to another string if
it matches any of the keys in the provided pattern dictionary.
Usage:
from core.utils import string_replace
KEY_DICT = {
'Isd': 'ISD',
}
s = string_replace('Beaumont Isd', KEY_DICT)
print s # 'Beaumont ISD'
"""
rx = re.compile('|'.join(map(re.escape, key_dict)))
def one_xlat(match):
return key_dict[match.group(0)]
return rx.sub(one_xlat, text)
|
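The trick in string_replace is worth spelling out: each key is passed through re.escape, the keys are joined into one alternation, and a single compiled pattern rewrites everything in one pass, so replacements never cascade into each other. A standalone sketch with a second key (keys illustrative):
import re
def string_replace(text, key_dict):
    rx = re.compile('|'.join(map(re.escape, key_dict)))
    return rx.sub(lambda match: key_dict[match.group(0)], text)
print(string_replace('Beaumont Isd and Katy Cisd', {'Isd': 'ISD', 'Cisd': 'CISD'}))
# -> 'Beaumont ISD and Katy CISD'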
72538db91eb722240bc23defd688f11356c54c25
|
scripts/balance.py
|
scripts/balance.py
|
#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
# Compute a genome-wide balancing/bias/normalization vector
# *** assumes uniform binning ***
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(COOLER_PATH, 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
|
#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import argparse
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Compute a genome-wide balancing/bias/normalization vector. Assumes uniform binning.")
parser.add_argument(
"cooler_file",
help="Cooler file",
metavar="COOLER_PATH")
args = vars(parser.parse_args())
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(args['cooler_file'], 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
|
Add arg parser to balancing script
|
Add arg parser to balancing script
|
Python
|
bsd-3-clause
|
mirnylab/cooler
|
#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
# Compute a genome-wide balancing/bias/normalization vector
# *** assumes uniform binning ***
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(COOLER_PATH, 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
Add arg parser to balancing script
|
#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import argparse
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Compute a genome-wide balancing/bias/normalization vector. Assumes uniform binning.")
parser.add_argument(
"cooler_file",
help="Cooler file",
metavar="COOLER_PATH")
args = vars(parser.parse_args())
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(args['cooler_file'], 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
|
<commit_before>#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
# Compute a genome-wide balancing/bias/normalization vector
# *** assumes uniform binning ***
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(COOLER_PATH, 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
<commit_msg>Add arg parser to balancing script<commit_after>
|
#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import argparse
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Compute a genome-wide balancing/bias/normalization vector. Assumes uniform binning.")
parser.add_argument(
"cooler_file",
help="Cooler file",
metavar="COOLER_PATH")
args = vars(parser.parse_args())
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(args['cooler_file'], 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
|
#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
# Compute a genome-wide balancing/bias/normalization vector
# *** assumes uniform binning ***
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(COOLER_PATH, 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
Add arg parser to balancing script#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import argparse
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Compute a genome-wide balancing/bias/normalization vector. Assumes uniform binning.")
parser.add_argument(
"cooler_file",
help="Cooler file",
metavar="COOLER_PATH")
args = vars(parser.parse_args())
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(args['cooler_file'], 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
|
<commit_before>#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
# Compute a genome-wide balancing/bias/normalization vector
# *** assumes uniform binning ***
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(COOLER_PATH, 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
<commit_msg>Add arg parser to balancing script<commit_after>#!/usr/bin/env python
from __future__ import division, print_function
from multiprocessing import Pool
import argparse
import numpy as np
import h5py
import cooler
import cooler.ice
N_CPUS = 5
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Compute a genome-wide balancing/bias/normalization vector. Assumes uniform binning.")
parser.add_argument(
"cooler_file",
help="Cooler file",
metavar="COOLER_PATH")
args = vars(parser.parse_args())
chunksize = int(100e6)
try:
pool = Pool(N_CPUS)
with h5py.File(args['cooler_file'], 'a') as h5:
bias = cooler.ice.iterative_correction(
h5, chunksize=chunksize, tol=1e-05, min_nnz=100,
cis_only=False, ignore_diags=3, map=pool.map)
# add the bias column to the file
if 'weight' in h5['bins']:
del h5['bins']['weight']
h5['bins'].create_dataset('weight', data=bias, **h5opts)
finally:
pool.close()
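
The change in this record swaps a hard-coded COOLER_PATH constant for a positional command-line argument. A minimal standalone sketch of the same argparse pattern; the argv list fed to parse_args is invented for the demo and nothing below comes from the cooler codebase:

import argparse

parser = argparse.ArgumentParser(
    description="Demo of the positional-argument pattern used above.")
parser.add_argument("cooler_file", help="Cooler file", metavar="COOLER_PATH")

# Parsing an explicit argv list instead of sys.argv keeps the demo runnable:
args = vars(parser.parse_args(["test.cool"]))
print(args["cooler_file"])  # -> test.cool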
|
f4b0d6855a56270435f3fff65d4652abc2da518a
|
casepro/settings_production_momza.py
|
casepro/settings_production_momza.py
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
LOGGING['loggers']['casepro.backend.junebug'] = {
'handlers': ['console'],
'level': 'INFO',
}
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
Add logger for junebug backend
|
Add logger for junebug backend
|
Python
|
bsd-3-clause
|
praekelt/casepro,praekelt/casepro,praekelt/casepro
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
Add logger for junebug backend
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
LOGGING['loggers']['casepro.backend.junebug'] = {
'handlers': ['console'],
'level': 'INFO',
}
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
<commit_before>from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
<commit_msg>Add logger for junebug backend<commit_after>
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
LOGGING['loggers']['casepro.backend.junebug'] = {
'handlers': ['console'],
'level': 'INFO',
}
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
Add logger for junebug backendfrom __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
LOGGING['loggers']['casepro.backend.junebug'] = {
'handlers': ['console'],
'level': 'INFO',
}
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
|
<commit_before>from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
<commit_msg>Add logger for junebug backend<commit_after>from __future__ import unicode_literals
import os
# import our default settings
from settings_production import * # noqa
LOGGING['loggers']['casepro.backend.junebug'] = {
'handlers': ['console'],
'level': 'INFO',
}
PODS[0]['contact_id_fieldname'] = os.environ.get( # noqa: F405
'REGISTRATION_CONTACT_ID_FIELDNAME',
'registrant_id',
)
PODS[0]['field_mapping'] = [ # noqa: F405
{"field": "faccode", "field_name": "Facility Code"},
{"field": "reg_type", "field_name": "Registration Type"},
{"field": "mom_dob", "field_name": "Mother's Date of Birth"},
{"field": "edd", "field_name": "Expected Due Date"},
]
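
This record adds one logger to the LOGGING dict that the momza settings inherit through the star import. A self-contained sketch of the same mutation, run through the stdlib dictConfig so it executes on its own; the base LOGGING dict here is a minimal stand-in, not the real casepro configuration:

import logging.config

LOGGING = {  # stand-in for the dict imported from settings_production
    'version': 1,
    'handlers': {'console': {'class': 'logging.StreamHandler'}},
    'loggers': {},
}

# Same pattern as the commit: the environment-specific settings module
# registers one extra named logger on the inherited dict.
LOGGING['loggers']['casepro.backend.junebug'] = {
    'handlers': ['console'],
    'level': 'INFO',
}

logging.config.dictConfig(LOGGING)
logging.getLogger('casepro.backend.junebug').info('configured')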
|
27e38309648c3c54dd8dd5dca1d1d19ff9d7381f
|
private_media/urls.py
|
private_media/urls.py
|
from django.conf.urls import url
from django.conf import settings
from views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
|
from django.conf.urls import url
from django.conf import settings
from .views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
|
Use local import for local files.
|
Use local import for local files.
|
Python
|
bsd-3-clause
|
sha-red/django-private-media
|
from django.conf.urls import url
from django.conf import settings
from views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
Use local import for local files.
|
from django.conf.urls import url
from django.conf import settings
from .views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
|
<commit_before>from django.conf.urls import url
from django.conf import settings
from views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
<commit_msg>Use local import for local files.<commit_after>
|
from django.conf.urls import url
from django.conf import settings
from .views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
|
from django.conf.urls import url
from django.conf import settings
from views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
Use local import for local files.from django.conf.urls import url
from django.conf import settings
from .views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
|
<commit_before>from django.conf.urls import url
from django.conf import settings
from views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
<commit_msg>Use local import for local files.<commit_after>from django.conf.urls import url
from django.conf import settings
from .views import serve_private_file
urlpatterns = [
url(r'^{0}(?P<path>.*)$'.format(settings.PRIVATE_MEDIA_URL.lstrip('/')), serve_private_file),
]
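
The fix in this record is one character of import syntax: "from views import ..." is an implicit relative import that Python 3 removed, while "from .views import ..." resolves explicitly inside the package (PEP 328). The URL pattern around it is also worth a standalone check; the media URL below is a made-up stand-in for settings.PRIVATE_MEDIA_URL:

import re

PRIVATE_MEDIA_URL = '/private/'  # illustrative value, not from the project
pattern = r'^{0}(?P<path>.*)$'.format(PRIVATE_MEDIA_URL.lstrip('/'))

match = re.match(pattern, 'private/reports/q3.pdf')
print(match.group('path'))  # -> reports/q3.pdf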
|
e53715c6ee7896d459a46c810480b12dc7a6b5ad
|
tg/dottednames/jinja_lookup.py
|
tg/dottednames/jinja_lookup.py
|
"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = file(template)
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
|
"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['tg.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = open(template, 'rb')
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
|
Fix jinja loader on Py3
|
Fix jinja loader on Py3
|
Python
|
mit
|
lucius-feng/tg2,lucius-feng/tg2
|
"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = file(template)
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
Fix jinja loader on Py3
|
"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['tg.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = open(template, 'rb')
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
|
<commit_before>"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = file(template)
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
<commit_msg>Fix jinja loader on Py3<commit_after>
|
"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['tg.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = open(template, 'rb')
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
|
"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = file(template)
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
Fix jinja loader on Py3"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['tg.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = open(template, 'rb')
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
|
<commit_before>"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = file(template)
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
<commit_msg>Fix jinja loader on Py3<commit_after>"""Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.jinja'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['tg.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = open(template, 'rb')
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
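
Two independent Python 3 fixes land in this record: the config key moves from 'pylons.app_globals' to 'tg.app_globals', and the Python-2-only file() builtin becomes open(..., 'rb') with an explicit UTF-8 decode. A throwaway reproduction of the I/O half; the file name is invented:

with open('demo.jinja', 'w') as out:  # create something to read back
    out.write('hello')

# file('demo.jinja') would raise NameError on Python 3; this runs on both:
fd = open('demo.jinja', 'rb')
try:
    source = fd.read().decode('utf-8')
finally:
    fd.close()
print(source)  # -> hello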
|
2a83a1606ffb7e761592a5b0a73e31d9b8b1fe08
|
bin/example_game_programmatic.py
|
bin/example_game_programmatic.py
|
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
game._run()
|
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
# Move the player down from the church to the crypt
game.process_input('d')
game.run()
|
Add Game.process_input use to example code
|
Add Game.process_input use to example code
|
Python
|
unlicense
|
mmurdoch/Vengeance,mmurdoch/Vengeance
|
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
game._run()Add Game.process_input use to example code
|
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
# Move the player down from the church to the crypt
game.process_input('d')
game.run()
|
<commit_before>from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
game._run()<commit_msg>Add Game.process_input use to example code<commit_after>
|
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
# Move the player down from the church to the crypt
game.process_input('d')
game.run()
|
from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
game._run()Add Game.process_input use to example codefrom vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
# Move the player down from the church to the crypt
game.process_input('d')
game.run()
|
<commit_before>from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
game._run()<commit_msg>Add Game.process_input use to example code<commit_after>from vengeance.game import Direction
from vengeance.game import Game
from vengeance.game import Location
go_up = Direction('up')
go_down = Direction('down')
go_up.opposite = go_down
go_in = Direction('in')
go_out = Direction('out')
go_in.opposite = go_out
go_west = Direction('west')
go_east = Direction('east')
go_west.opposite = go_east
church = Location('A Church', 'Tiny place of worship')
crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi')
coffin = Location('A Coffin', 'A tight squeeze and pitch dark')
cave = Location('A Cave')
church.add_exit(go_down, crypt)
crypt.add_one_way_exit(go_in, coffin)
crypt.add_exit(go_west, cave)
game = Game([church, crypt, coffin, cave])
# Move the player down from the church to the crypt
game.process_input('d')
game.run()
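
The example script stops reaching into the private _run() and drives the game through its public surface instead: process_input('d') to move down from the church, then run(). A generic sketch of the convention being respected; the Game class below is a stand-in, not the vengeance one:

class Game(object):
    def _run(self):    # leading underscore: internal detail, free to change
        print('internal loop')

    def run(self):     # public, stable entry point
        self._run()

Game().run()  # callers depend only on the public method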
|
23b54e836a94a2d1ebdb919a30d19ca4523d45b5
|
project_template.py
|
project_template.py
|
import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
|
import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.items():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
# Show quick panel for selecting template
self.template_names = list(self.templates.keys())
self.window.show_quick_panel(self.template_names,
self.on_selected)
def on_selected(self, idx):
if idx < 0:
# No template selected
return
template_name = self.template_names[idx]
print(template_name)
|
Implement selecting template by quick panel
|
Implement selecting template by quick panel
|
Python
|
mit
|
autopp/SublimeProjectTemplate,autopp/SublimeProjectTemplate
|
import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
Implement selecting template by quick panel
|
import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.items():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
# Show quick panel for selecting template
self.template_names = list(self.templates.keys())
self.window.show_quick_panel(self.template_names,
self.on_selected)
def on_selected(self, idx):
if idx < 0:
# No template selected
return
template_name = self.template_names[idx]
print(template_name)
|
<commit_before>import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
<commit_msg>Implement selecting template by quick panel<commit_after>
|
import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.items():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
# Show quick panel for selecting template
self.template_names = list(self.templates.keys())
self.window.show_quick_panel(self.template_names,
self.on_selected)
def on_selected(self, idx):
if idx < 0:
# No template selected
return
template_name = self.template_names[idx]
print(template_name)
|
import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
Implement selecting template by quick panelimport sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.items():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
# Show quick panel for selecting template
self.template_names = list(self.templates.keys())
self.window.show_quick_panel(self.template_names,
self.on_selected)
def on_selected(self, idx):
if idx < 0:
# No template selected
return
template_name = self.template_names[idx]
print(template_name)
|
<commit_before>import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.values():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
<commit_msg>Implement selecting template by quick panel<commit_after>import sublime
import sublime_plugin
class ProjectTemplateCommand(sublime_plugin.WindowCommand):
SETTINGS_FILE_NAME = 'ProjectTemplate.sublime-settings'
TEMPLATES_KEY = 'templates'
def run(self):
# Check whether the folder is open only one in the current window.
folders = self.window.folders()
msg = None
if len(folders) == 0:
msg = "No floder opened in the current window."
elif len(folders) > 1:
msg = "Multiple folder opened in the current window."
if msg:
sublime.error_message(msg)
return
self.folder = folders[0]
# Load settings
settings = sublime.load_settings(self.SETTINGS_FILE_NAME)
self.templates = settings.get(self.TEMPLATES_KEY, {})
# Check the format of templates
if type(self.templates) != dict:
sublime.error_message("The templates should be an object.")
return
for name, template in self.templates.items():
if type(template) != dict:
msg = (
"Template '%s' is not a object.\n"
"Each of the template should be an object."
) % (name)
sublime.error_message(msg)
return
# Show quick panel for selecting template
self.template_names = list(self.templates.keys())
self.window.show_quick_panel(self.template_names,
self.on_selected)
def on_selected(self, idx):
if idx < 0:
# No template selected
return
template_name = self.template_names[idx]
print(template_name)
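
Alongside wiring up the quick panel, this diff fixes a latent bug: "for name, template in self.templates.values()" tries to unpack each dict value into two names, whereas .items() yields the (name, template) pairs the loop expects. A standalone demonstration with made-up template data:

templates = {
    'python': {'files': ['main.py']},
    'web': {'files': ['index.html']},
}

for name, template in templates.items():  # yields (key, value) pairs
    print(name, template)

# The pre-fix form fails, since each value is a single dict, not a pair:
#     for name, template in templates.values():  # ValueError on unpacking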
|
9f71fd2df043bc6eedbd945100633d3184356c89
|
tools/pyhande/pyhande/utils.py
|
tools/pyhande/pyhande/utils.py
|
'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data['iterations']).unique()
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
|
'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data, name='iterations'):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data[name].unique())
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
|
Correct location of bracket so that grouping by beta loops is done correctly.
|
Correct location of bracket so that grouping by beta loops is done correctly.
|
Python
|
lgpl-2.1
|
hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,ruthfranklin/hande,hande-qmc/hande,hande-qmc/hande
|
'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data['iterations']).unique()
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
Correct location of bracket so that grouping by beta loops is done correctly.
|
'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data, name='iterations'):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data[name].unique())
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
|
<commit_before>'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data['iterations']).unique()
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
<commit_msg>Correct location of bracket so that grouping by beta loops is done correctly.<commit_after>
|
'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data, name='iterations'):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data[name].unique())
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
|
'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data['iterations']).unique()
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
Correct location of bracket so that grouping by beta loops is done correctly.'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data, name='iterations'):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data[name].unique())
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
|
<commit_before>'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data['iterations']).unique()
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
<commit_msg>Correct location of bracket so that grouping by beta loops is done correctly.<commit_after>'''Utility procedures for manipulating HANDE data.'''
import numpy as np
def groupby_beta_loops(data, name='iterations'):
'''Group a HANDE DMQMC data table by beta loop.
Parameters
----------
data : :class:`pandas.DataFrame`
DMQMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped by beta loop.
'''
# Exploit the fact that (except for possibly the last beta loop due to wall
# time) each beta loop contains the same set of iterations.
indx = np.arange(len(data)) // len(data[name].unique())
return data.groupby(indx)
def groupby_iterations(data):
'''Group a HANDE QMC data table by blocks of iterations.
Parameters
----------
data : :class:`pandas.DataFrame`
QMC data table (e.g. obtained by :func:`pyhande.extract.extract_data`.
Returns
-------
grouped : :class:`pandas.DataFrameGroupBy`
GroupBy object with data table grouped into blocks within which the
iteration count increases monotonically.
'''
indx = np.zeros(len(data))
prev_iteration = -1
curr_indx = 0
for i in range(len(data)):
if data['iterations'].iloc[i] < prev_iteration:
# new block of iterations
curr_indx += 1
indx[i] = curr_indx
prev_iteration = data['iterations'].iloc[i]
return data.groupby(indx)
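
The bracket move is the entire fix here: in the old line, len(data['iterations']).unique() takes the column length first and then calls .unique() on a plain int, which raises AttributeError; the intent was len(data[name].unique()), the number of distinct iteration values in one beta loop. A small check in which the toy frame simulates two beta loops of three iterations each:

import numpy as np
import pandas as pd

data = pd.DataFrame({'iterations': [10, 20, 30, 10, 20, 30]})

indx = np.arange(len(data)) // len(data['iterations'].unique())
print(indx)  # -> [0 0 0 1 1 1], one label per beta loop

for loop, frame in data.groupby(indx):
    print(loop, len(frame))  # -> 0 3, then 1 3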
|
7a8a2556bbeb255c991aa5a39aa04b4fed238a7b
|
kolibri/plugins/setup_wizard/middleware.py
|
kolibri/plugins/setup_wizard/middleware.py
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
Add 'session-list' to constants list.
|
Add 'session-list' to constants list.
|
Python
|
mit
|
DXCanas/kolibri,christianmemije/kolibri,learningequality/kolibri,jonboiser/kolibri,jonboiser/kolibri,rtibbles/kolibri,aronasorman/kolibri,learningequality/kolibri,rtibbles/kolibri,christianmemije/kolibri,jayoshih/kolibri,jayoshih/kolibri,jayoshih/kolibri,learningequality/kolibri,christianmemije/kolibri,DXCanas/kolibri,learningequality/kolibri,lyw07/kolibri,benjaoming/kolibri,lyw07/kolibri,jonboiser/kolibri,DXCanas/kolibri,MingDai/kolibri,lyw07/kolibri,rtibbles/kolibri,mrpau/kolibri,mrpau/kolibri,jonboiser/kolibri,indirectlylit/kolibri,aronasorman/kolibri,benjaoming/kolibri,benjaoming/kolibri,jayoshih/kolibri,DXCanas/kolibri,MingDai/kolibri,mrpau/kolibri,rtibbles/kolibri,benjaoming/kolibri,lyw07/kolibri,aronasorman/kolibri,mrpau/kolibri,MingDai/kolibri,MingDai/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,christianmemije/kolibri,indirectlylit/kolibri,aronasorman/kolibri
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
Add 'session-list' to constants list.
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
<commit_before>from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
<commit_msg>Add 'session-list' to constants list.<commit_after>
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
Add 'session-list' to constants list.from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
<commit_before>from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
# manually add session bootstrapping api call to allowed path list
allowed_paths.append(reverse('session-detail', kwargs={'pk': 'current'}))
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
<commit_msg>Add 'session-list' to constants list.<commit_after>from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from kolibri.auth.models import DeviceOwner
ALLOWED_PATH_LIST = [
"facility-list",
"deviceowner-list",
"kolibri:setupwizardplugin:setupwizard",
"task-localdrive",
"task-startremoteimport",
"task-list",
"session-list"
]
class SetupWizardMiddleware():
"""
display the superuser creation app if no superuser exists.
"""
deviceowner_already_created = False
def process_request(self, request):
# If a DeviceOwner has already been created, no need to do anything here
self.deviceowner_already_created = self.deviceowner_already_created or DeviceOwner.objects.exists()
if self.deviceowner_already_created:
if request.path.startswith(reverse("kolibri:setupwizardplugin:setupwizard")):
return redirect(reverse("kolibri:learnplugin:learn"))
return
# Don't redirect for URLs that are required for the setup wizard
allowed_paths = [reverse(name) for name in ALLOWED_PATH_LIST]
if any(request.path.startswith(path_prefix) for path_prefix in allowed_paths):
return
# If we've gotten this far, we want to redirect to the setup wizard
return redirect(reverse("kolibri:setupwizardplugin:setupwizard"))
|
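A standalone sketch of the whitelist test performed in process_request above (paths hypothetical; no Django required):
allowed_paths = ['/api/session/', '/api/facility/', '/setup_wizard/']
def is_allowed(path):
    # Mirrors the middleware: permit any request whose path starts with a whitelisted prefix.
    return any(path.startswith(prefix) for prefix in allowed_paths)
print(is_allowed('/api/session/current/'))  # True -> request passes through
print(is_allowed('/learn/'))                # False -> middleware redirects to the setup wizard
|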
d3f63e13499af783fe63f86ffbd23e30b7bed518
|
tests/settings/common.py
|
tests/settings/common.py
|
SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
|
SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
'tests',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
|
Add the tests app to the INSTALLED_APPS.
|
Add the tests app to the INSTALLED_APPS.
|
Python
|
bsd-3-clause
|
unt-libraries/django-premis-event-service,unt-libraries/django-premis-event-service,unt-libraries/django-premis-event-service
|
SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
Add the tests app to the INSTALLED_APPS.
|
SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
'tests',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
|
<commit_before>SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
<commit_msg>Add the tests app to the INSTALLED_APPS.<commit_after>
|
SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
'tests',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
|
SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
Add the tests app to the INSTALLED_APPS.SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
'tests',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
|
<commit_before>SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
<commit_msg>Add the tests app to the INSTALLED_APPS.<commit_after>SECRET_KEY = 'p&grn73^$c!ae=o)igek_rn2t#(_sb9g1kqwxcpv16-ie__1=1'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'premis_event_service',
'tests',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
MAINTENANCE_MSG = None
|
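Django only creates database tables for apps listed in INSTALLED_APPS, which is why the tests app has to be registered before its models can be used. A minimal sketch of the equivalent programmatic setup (assumes premis_event_service and tests are importable packages):
import django
from django.conf import settings
settings.configure(
    DEBUG=True,
    DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3'}},
    INSTALLED_APPS=['django.contrib.contenttypes',
                    'django.contrib.auth',
                    'premis_event_service',
                    'tests'],  # without this entry, models defined in tests/ stay invisible
)
django.setup()  # registers the apps so test models can be migrated and queried
|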
46c0543306d11551f9c818922dc2b2b4bf3d3b4d
|
byceps/services/email/service.py
|
byceps/services/email/service.py
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from ...util.jobqueue import enqueue
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def enqueue_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Enqueue an e-mail to be sent asynchronously."""
enqueue(send_email, recipients, subject, body, sender=sender)
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
Add function to enqueue e-mails to be sent asynchronously rather than blocking/sending synchronously
|
Add function to enqueue e-mails to be sent asynchronously rather than blocking/sending synchronously
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
Add function to enqueue e-mails to be sent asynchronously rather than blocking/sending synchronously
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from ...util.jobqueue import enqueue
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def enqueue_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Enqueue an e-mail to be sent asynchronously."""
enqueue(send_email, recipients, subject, body, sender=sender)
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
<commit_before>"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
<commit_msg>Add function to enqueue e-mails to be sent asynchronously rather than blocking/sending synchronously<commit_after>
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from ...util.jobqueue import enqueue
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def enqueue_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Enqueue an e-mail to be sent asynchronously."""
enqueue(send_email, recipients, subject, body, sender=sender)
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
Add function to enqueue e-mails to be sent asynchronously rather than blocking/sending synchronously"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from ...util.jobqueue import enqueue
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def enqueue_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Enqueue an e-mail to be sent asynchronously."""
enqueue(send_email, recipients, subject, body, sender=sender)
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
<commit_before>"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
<commit_msg>Add function to enqueue e-mails to be sent asynchronously rather than blocking/sending synchronously<commit_after>"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from ...util.jobqueue import enqueue
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def enqueue_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Enqueue an e-mail to be sent asynchronously."""
enqueue(send_email, recipients, subject, body, sender=sender)
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
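A usage sketch for the new enqueue_email helper (addresses hypothetical; assumes a worker process consumes the job queue behind util.jobqueue.enqueue):
from byceps.services.email import service as email_service
# Returns immediately; the queued send_email call is executed later by a background worker.
email_service.enqueue_email(
    ['alice@example.com'],
    'Registration confirmed',
    'See you at the party!',
    sender='noreply@example.com',
)
|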
77ac03544f85e95603507e1dc0cec2189e0d5a03
|
get_ip.py
|
get_ip.py
|
#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect(region):
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(region, tag_name):
conn = connect(region)
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
elif opt in ("-r", "--region"):
region = arg
print_ips(region, tag_name)
|
#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect():
metadata = boto.utils.get_instance_metadata()
region = metadata['placement']['availability-zone'][:-1]
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(tag_name):
conn = connect()
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
tag_name = ""
region = ""
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
print_ips(tag_name)
|
Add Python script for IP discovery
|
Add Python script for IP discovery
|
Python
|
bsd-3-clause
|
GetStream/Stream-Framework-Bench,GetStream/Stream-Framework-Bench
|
#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect(region):
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(region, tag_name):
conn = connect(region)
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
elif opt in ("-r", "--region"):
region = arg
print_ips(region, tag_name)
Add Python script for IP discovery
|
#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect():
metadata = boto.utils.get_instance_metadata()
region = metadata['placement']['availability-zone'][:-1]
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(tag_name):
conn = connect()
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
tag_name = ""
region = ""
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
print_ips(tag_name)
|
<commit_before>#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect(region):
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(region, tag_name):
conn = connect(region)
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
elif opt in ("-r", "--region"):
region = arg
print_ips(region, tag_name)
<commit_msg>Add Python script for IP discovery<commit_after>
|
#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect():
metadata = boto.utils.get_instance_metadata()
region = metadata['placement']['availability-zone'][:-1]
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(tag_name):
conn = connect()
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
tag_name = ""
region = ""
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
print_ips(tag_name)
|
#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect(region):
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(region, tag_name):
conn = connect(region)
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
elif opt in ("-r", "--region"):
region = arg
print_ips(region, tag_name)
Add Python script for IP discovery#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect():
metadata = boto.utils.get_instance_metadata()
region = metadata['placement']['availability-zone'][:-1]
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(tag_name):
conn = connect()
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
tag_name = ""
region = ""
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
print_ips(tag_name)
|
<commit_before>#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect(region):
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(region, tag_name):
conn = connect(region)
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
elif opt in ("-r", "--region"):
region = arg
print_ips(region, tag_name)
<commit_msg>Add Python script for IP discovery<commit_after>#!/usr/bin/python
#
# Get private IPv4s for a given instance name.
#
import boto
import boto.ec2
import getopt
import sys
#
# Get the profile
#
def connect():
metadata = boto.utils.get_instance_metadata()
region = metadata['placement']['availability-zone'][:-1]
profile = metadata['iam']['info']['InstanceProfileArn']
profile = profile[profile.find('/') + 1:]
conn = boto.ec2.connection.EC2Connection(
region=boto.ec2.get_region(region),
aws_access_key_id=metadata['iam']['security-credentials'][profile]['AccessKeyId'],
aws_secret_access_key=metadata['iam']['security-credentials'][profile]['SecretAccessKey'],
security_token=metadata['iam']['security-credentials'][profile]['Token']
)
return conn
#
# Print out private IPv4
#
def print_ips(tag_name):
conn = connect()
reservations = conn.get_all_instances(filters={"tag:Name": tag_name})
print("%s" % (reservations[0]["Instances"][0]["PrivateIpAddress"]))
#
# Main
#
opts, args = getopt.getopt(sys.argv[1:], "Lt:r:", ["tag-name", "region"])
tag_name = ""
region = ""
for opt, arg in opts:
if opt in ("-t", "--tag-name"):
tag_name = arg
print_ips(tag_name)
|
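The region in connect() is derived by stripping the trailing zone letter from the instance's availability zone; a tiny sketch with a hypothetical zone string:
zone = 'us-east-1a'   # hypothetical metadata['placement']['availability-zone'] value
region = zone[:-1]    # drop the zone letter to obtain the region name
print(region)         # us-east-1
|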
7fabf481ed788350aa0c94eec7c71d6cfb75c14a
|
store/forms.py
|
store/forms.py
|
from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
|
from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text','product']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
|
Add product field to ReviewForm
|
Add product field to ReviewForm
|
Python
|
bsd-3-clause
|
kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop
|
from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
Add product field to ReviewForm
|
from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text','product']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
|
<commit_before>from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
<commit_msg>Add product field to ReviewForm<commit_after>
|
from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text','product']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
|
from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
Add product field to ReviewFormfrom django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text','product']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
|
<commit_before>from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
<commit_msg>Add product field to ReviewForm<commit_after>from django import forms
from .models import Review
class ReviewForm(forms.models.ModelForm):
name = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'placeholder': 'Your Name',
'class': 'form-control',
}),
)
class Meta:
model = Review
fields = ['title', 'rating', 'text','product']
widgets = {
'title': forms.fields.TextInput(
attrs={
'placeholder': 'Short Summary e.g. Awesome Product',
'class': 'form-control',
}),
'text': forms.Textarea(
attrs={
'placeholder': 'Your Review',
'class': 'form-control',
'rows': 5,
}),
}
|
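With 'product' added to Meta.fields, the form now binds and validates the reviewed product from submitted data; a hedged usage sketch (field values and the product instance are hypothetical):
from store.forms import ReviewForm
form = ReviewForm(data={
    'name': 'Jane',
    'title': 'Awesome Product',
    'rating': 5,
    'text': 'Works great.',
    'product': product.pk,  # primary key of an existing Product instance
})
if form.is_valid():
    review = form.save()  # the saved Review is linked to the chosen product
|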
4561586f3f1de1a7a86213bec3ddd6273c223cdd
|
runtests.py
|
runtests.py
|
"""
Standalone test runner for wardrounds plugin
"""
import os
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
|
"""
Standalone test runner for wardrounds plugin
"""
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
|
Kill an unused import - thanks to @landscapeio
|
Kill an unused import - thanks to @landscapeio
|
Python
|
agpl-3.0
|
openhealthcare/opal-referral,openhealthcare/opal-referral,openhealthcare/opal-referral,openhealthcare/opal-referral
|
"""
Standalone test runner for wardrounds plugin
"""
import os
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
Kill an unused import - thanks to @landscapeio
|
"""
Standalone test runner for wardrounds plugin
"""
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
|
<commit_before>"""
Standalone test runner for wardrounds plugin
"""
import os
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
<commit_msg>Kill an unused import - thanks to @landscapeio<commit_after>
|
"""
Standalone test runner for wardrounds plugin
"""
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
|
"""
Standalone test runner for wardrounds plugin
"""
import os
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
Kill an unused import - thanks to @landscapeio"""
Standalone test runner for wardrounds plugin
"""
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
|
<commit_before>"""
Standalone test runner for wardrounds plugin
"""
import os
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
<commit_msg>Kill an unused import - thanks to @landscapeio<commit_after>"""
Standalone test runner for wardrounds plugin
"""
import sys
from opal.core import application
class Application(application.OpalApplication):
pass
from django.conf import settings
settings.configure(DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
OPAL_OPTIONS_MODULE = 'referral.tests.dummy_options_module',
ROOT_URLCONF='referral.urls',
STATIC_URL='/assets/',
INSTALLED_APPS=('django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'opal',
'opal.tests',
'referral',))
from django.test.runner import DiscoverRunner
test_runner = DiscoverRunner(verbosity=1)
if len(sys.argv) == 2:
failures = test_runner.run_tests([sys.argv[-1], ])
else:
failures = test_runner.run_tests(['referral', ])
if failures:
sys.exit(failures)
|
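Unused imports like the removed os are exactly what static checkers flag; a sketch of checking the file with pyflakes (assumes pyflakes is installed; the reported message is approximate):
from pyflakes.api import checkPath
checkPath('runtests.py')  # prints something like: runtests.py:4: 'os' imported but unused
|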
ea1189cbba231d68e82ec0fe25f33402d0ea956c
|
common/lib/chem/setup.py
|
common/lib/chem/setup.py
|
from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0"
"nltk==2.0.5",
],
)
|
from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0",
"nltk==2.0.5",
],
)
|
Fix syntax of previous commit
|
Fix syntax of previous commit
|
Python
|
agpl-3.0
|
marcore/edx-platform,marcore/edx-platform,marcore/edx-platform,marcore/edx-platform
|
from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0"
"nltk==2.0.5",
],
)
Fix syntax of previous commit
|
from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0",
"nltk==2.0.5",
],
)
|
<commit_before>from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0"
"nltk==2.0.5",
],
)
<commit_msg>Fix syntax of previous commit<commit_after>
|
from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0",
"nltk==2.0.5",
],
)
|
from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0"
"nltk==2.0.5",
],
)
Fix syntax of previous commitfrom setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0",
"nltk==2.0.5",
],
)
|
<commit_before>from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0"
"nltk==2.0.5",
],
)
<commit_msg>Fix syntax of previous commit<commit_after>from setuptools import setup
setup(
name="chem",
version="0.1.1",
packages=["chem"],
install_requires=[
"pyparsing==2.0.1",
"numpy==1.6.2",
"scipy==0.14.0",
"nltk==2.0.5",
],
)
|
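The one-character fix in the record above deserves a gloss: without the comma, Python implicitly concatenates the adjacent string literals, so setuptools receives the single malformed requirement "scipy==0.14.0nltk==2.0.5" and nltk is never installed. A self-contained demonstration of the mechanism:

# Adjacent string literals concatenate silently; this is the root cause.
broken = [
    "numpy==1.6.2",
    "scipy==0.14.0"   # missing comma
    "nltk==2.0.5",
]
fixed = [
    "numpy==1.6.2",
    "scipy==0.14.0",
    "nltk==2.0.5",
]
print(broken)                   # ['numpy==1.6.2', 'scipy==0.14.0nltk==2.0.5']
print(len(broken), len(fixed))  # 2 3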
4aea3f18b68150f1bff7ca40d22ce69ce2be64e0
|
mfr/extensions/tabular/libs/ezodf_tools.py
|
mfr/extensions/tabular/libs/ezodf_tools.py
|
""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return header, data
|
""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return sheets
|
Return sheets, not header, data
|
Return sheets, not header, data
|
Python
|
apache-2.0
|
felliott/modular-file-renderer,Johnetordoff/modular-file-renderer,TomBaxter/modular-file-renderer,rdhyee/modular-file-renderer,CenterForOpenScience/modular-file-renderer,AddisonSchiller/modular-file-renderer,AddisonSchiller/modular-file-renderer,rdhyee/modular-file-renderer,TomBaxter/modular-file-renderer,CenterForOpenScience/modular-file-renderer,rdhyee/modular-file-renderer,Johnetordoff/modular-file-renderer,TomBaxter/modular-file-renderer,felliott/modular-file-renderer,Johnetordoff/modular-file-renderer,felliott/modular-file-renderer,felliott/modular-file-renderer,Johnetordoff/modular-file-renderer,TomBaxter/modular-file-renderer,CenterForOpenScience/modular-file-renderer,AddisonSchiller/modular-file-renderer,CenterForOpenScience/modular-file-renderer,rdhyee/modular-file-renderer,AddisonSchiller/modular-file-renderer
|
""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return header, data
Return sheets, not header, data
|
""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return sheets
|
<commit_before>""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return header, data
<commit_msg>Return sheets, not header, data<commit_after>
|
""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return sheets
|
""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return header, data
Return sheets, not header, data""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return sheets
|
<commit_before>""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return header, data
<commit_msg>Return sheets, not header, data<commit_after>""" This library works for some ods files but not others. Because it doesn't
work consistently, we have disabled this for the moment."""
import ezodf
from ..utilities import data_population, header_population
def ods_ezodf(fp):
"""Read and convert a ods file to JSON format using the ezodf library
:param fp: File pointer object
:return: tuple of table headers and data
"""
workbook = ezodf.opendoc(fp.name)
sheets = {}
for sheet in workbook.sheets:
list_data = [[cell.value for cell in row] for row in sheet.rows()]
header = header_population(list_data[0])
data = data_population(list_data)
sheets[str(sheet)] = (header, data)
return sheets
|
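The bug fixed in the record above is the classic accumulator slip: the loop fills a sheets dict, but the function returned the loop-local header and data from whichever sheet happened to be processed last. The shape of the fix, stripped of the ezodf specifics (the data layout below is invented for illustration):

def collect(tables):
    # tables: {name: [header_row, row, row, ...]}, illustrative only
    results = {}
    for name, rows in tables.items():
        header, data = rows[0], rows[1:]
        results[name] = (header, data)
    # Return the accumulator, not the last iteration's locals;
    # returning (header, data) would silently drop every sheet but one.
    return results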
8a6015610bba2dcdc0a2cb031b2f58606328841f
|
src/fastpb/generator.py
|
src/fastpb/generator.py
|
#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
log = sys.stderr
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
path = tempfile.mkdtemp()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
cFilePath = os.path.join(path, name + '.c')
with open(cFilePath, 'w') as f:
t = Template(resource_string(__name__, 'template/module.jinja.c'))
f.write(t.render(context))
setupPyPath = os.path.join(path, 'setup.py')
with open(setupPyPath, 'w') as f:
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
f.write(t.render({'files': files}))
print >> log, path
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
response = plugin_pb2.CodeGeneratorResponse()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
# Write the C file.
t = Template(resource_string(__name__, 'template/module.jinja.c'))
cFile = response.file.add()
cFile.name = name + '.c'
cFile.content = t.render(context)
# Write setup.py.
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
setupFile = response.file.add()
setupFile.name = 'setup.py'
setupFile.content = t.render({'files': files})
sys.stdout.write(response.SerializeToString())
if __name__ == '__main__':
main()
|
Use protoc for file output
|
Use protoc for file output
|
Python
|
apache-2.0
|
Cue/fast-python-pb
|
#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
log = sys.stderr
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
path = tempfile.mkdtemp()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
cFilePath = os.path.join(path, name + '.c')
with open(cFilePath, 'w') as f:
t = Template(resource_string(__name__, 'template/module.jinja.c'))
f.write(t.render(context))
setupPyPath = os.path.join(path, 'setup.py')
with open(setupPyPath, 'w') as f:
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
f.write(t.render({'files': files}))
print >> log, path
if __name__ == '__main__':
main()
Use protoc for file output
|
#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
response = plugin_pb2.CodeGeneratorResponse()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
# Write the C file.
t = Template(resource_string(__name__, 'template/module.jinja.c'))
cFile = response.file.add()
cFile.name = name + '.c'
cFile.content = t.render(context)
# Write setup.py.
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
setupFile = response.file.add()
setupFile.name = 'setup.py'
setupFile.content = t.render({'files': files})
sys.stdout.write(response.SerializeToString())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
log = sys.stderr
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
path = tempfile.mkdtemp()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
cFilePath = os.path.join(path, name + '.c')
with open(cFilePath, 'w') as f:
t = Template(resource_string(__name__, 'template/module.jinja.c'))
f.write(t.render(context))
setupPyPath = os.path.join(path, 'setup.py')
with open(setupPyPath, 'w') as f:
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
f.write(t.render({'files': files}))
print >> log, path
if __name__ == '__main__':
main()
<commit_msg>Use protoc for file output<commit_after>
|
#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
response = plugin_pb2.CodeGeneratorResponse()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
# Write the C file.
t = Template(resource_string(__name__, 'template/module.jinja.c'))
cFile = response.file.add()
cFile.name = name + '.c'
cFile.content = t.render(context)
# Write setup.py.
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
setupFile = response.file.add()
setupFile.name = 'setup.py'
setupFile.content = t.render({'files': files})
sys.stdout.write(response.SerializeToString())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
log = sys.stderr
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
path = tempfile.mkdtemp()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
cFilePath = os.path.join(path, name + '.c')
with open(cFilePath, 'w') as f:
t = Template(resource_string(__name__, 'template/module.jinja.c'))
f.write(t.render(context))
setupPyPath = os.path.join(path, 'setup.py')
with open(setupPyPath, 'w') as f:
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
f.write(t.render({'files': files}))
print >> log, path
if __name__ == '__main__':
main()
Use protoc for file output#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
response = plugin_pb2.CodeGeneratorResponse()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
# Write the C file.
t = Template(resource_string(__name__, 'template/module.jinja.c'))
cFile = response.file.add()
cFile.name = name + '.c'
cFile.content = t.render(context)
# Write setup.py.
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
setupFile = response.file.add()
setupFile.name = 'setup.py'
setupFile.content = t.render({'files': files})
sys.stdout.write(response.SerializeToString())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
log = sys.stderr
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
path = tempfile.mkdtemp()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
cFilePath = os.path.join(path, name + '.c')
with open(cFilePath, 'w') as f:
t = Template(resource_string(__name__, 'template/module.jinja.c'))
f.write(t.render(context))
setupPyPath = os.path.join(path, 'setup.py')
with open(setupPyPath, 'w') as f:
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
f.write(t.render({'files': files}))
print >> log, path
if __name__ == '__main__':
main()
<commit_msg>Use protoc for file output<commit_after>#!/usr/bin/env python
import plugin_pb2
from jinja2 import Template
from pkg_resources import resource_string
import os.path
import sys
import tempfile
def main():
request = plugin_pb2.CodeGeneratorRequest()
request.ParseFromString(sys.stdin.read())
response = plugin_pb2.CodeGeneratorResponse()
generateFiles = set(request.file_to_generate)
files = []
for file in request.proto_file:
if file.name not in generateFiles:
continue
name = file.name.split('.')[0]
files.append(name)
context = {
'moduleName': name,
'messages': file.message_type
}
# Write the C file.
t = Template(resource_string(__name__, 'template/module.jinja.c'))
cFile = response.file.add()
cFile.name = name + '.c'
cFile.content = t.render(context)
# Write setup.py.
t = Template(resource_string(__name__, 'template/setup.jinja.py'))
setupFile = response.file.add()
setupFile.name = 'setup.py'
setupFile.content = t.render({'files': files})
sys.stdout.write(response.SerializeToString())
if __name__ == '__main__':
main()
|
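The rewrite in the record above adopts the protoc plugin protocol: instead of writing files itself, the generator reads a serialized CodeGeneratorRequest from stdin, fills in a CodeGeneratorResponse, and lets protoc materialize the files on disk. A skeleton of that contract, assuming the stock google.protobuf compiler stubs; the .buffer handles are a Python 3 adaptation, whereas the original is Python 2:

import sys
from google.protobuf.compiler import plugin_pb2

def main():
    # protoc pipes a binary CodeGeneratorRequest into the plugin's stdin.
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())

    response = plugin_pb2.CodeGeneratorResponse()
    for proto_file in request.proto_file:
        generated = response.file.add()         # one entry per output file
        generated.name = proto_file.name + '.txt'
        generated.content = 'generated from %s\n' % proto_file.name

    # protoc reads the response from stdout and writes the files itself.
    sys.stdout.buffer.write(response.SerializeToString())

if __name__ == '__main__':
    main()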
fae5db20daa1e7bcb1b915ce7f3ca84ae8bd4a1f
|
client/scripts/install-plugin.py
|
client/scripts/install-plugin.py
|
import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
plugin_folder = 'built_plugin/%s' % plugin_version
install_plugin(project_file, plugin_folder)
|
import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
cur_dir = os.path.dirname(os.path.abspath(__file__))
plugin_folder = os.path.join(cur_dir, 'built_plugin/%s' % plugin_version)
install_plugin(project_file, plugin_folder)
|
Update relative path with respect to __file__
|
Update relative path with respect to __file__
|
Python
|
mit
|
qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv
|
import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
plugin_folder = 'built_plugin/%s' % plugin_version
install_plugin(project_file, plugin_folder)
Update relative path with respect to __file__
|
import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
cur_dir = os.path.dirname(os.path.abspath(__file__))
plugin_folder = os.path.join(cur_dir, 'built_plugin/%s' % plugin_version)
install_plugin(project_file, plugin_folder)
|
<commit_before>import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
plugin_folder = 'built_plugin/%s' % plugin_version
install_plugin(project_file, plugin_folder)
<commit_msg>Update relative path with respect to __file__<commit_after>
|
import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
cur_dir = os.path.dirname(os.path.abspath(__file__))
plugin_folder = os.path.join(cur_dir, 'built_plugin/%s' % plugin_version)
install_plugin(project_file, plugin_folder)
|
import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
plugin_folder = 'built_plugin/%s' % plugin_version
install_plugin(project_file, plugin_folder)
Update relative path with respect to __file__import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
cur_dir = os.path.dirname(os.path.abspath(__file__))
plugin_folder = os.path.join(cur_dir, 'built_plugin/%s' % plugin_version)
install_plugin(project_file, plugin_folder)
|
<commit_before>import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
plugin_folder = 'built_plugin/%s' % plugin_version
install_plugin(project_file, plugin_folder)
<commit_msg>Update relative path with respect to __file__<commit_after>import argparse, shutil, os
import ue4util
def install_plugin(project_file, plugin_folder):
project_folder = os.path.dirname(project_file)
install_folder = os.path.join(project_folder, 'Plugins', 'unrealcv')
if os.path.isdir(install_folder):
        shutil.rmtree(install_folder) # Completely remove the old version, a little dangerous
shutil.copytree(plugin_folder, install_folder)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('plugin_version')
parser.add_argument('project_file')
args = parser.parse_args()
plugin_version = args.plugin_version
project_file = ue4util.get_real_abspath(args.project_file)
cur_dir = os.path.dirname(os.path.abspath(__file__))
plugin_folder = os.path.join(cur_dir, 'built_plugin/%s' % plugin_version)
install_plugin(project_file, plugin_folder)
|
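The fix in the record above anchors the plugin folder to the script's own location instead of the caller's working directory, so the script behaves identically no matter where it is invoked from. The idiom in isolation (the version string is illustrative):

import os

# Resolve resources relative to this file, not to os.getcwd().
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
plugin_folder = os.path.join(SCRIPT_DIR, 'built_plugin', '0.3.0')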
12c4b11c6ef49e1a3adcb67217fc2feb8dbc9e4c
|
scenario/_consts.py
|
scenario/_consts.py
|
from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 10
|
from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 1
|
Revert "Update timeout to 10 seconds"
|
Revert "Update timeout to 10 seconds"
This reverts commit e7cb98a1006d292a96670a11c807d0bbf9075ebd.
|
Python
|
mit
|
shlomihod/scenario,shlomihod/scenario,shlomihod/scenario
|
from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 10Revert "Update timeout to 10 seconds"
This reverts commit e7cb98a1006d292a96670a11c807d0bbf9075ebd.
|
from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 1
|
<commit_before>from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 10<commit_msg>Revert "Update timeout to 10 seconds"
This reverts commit e7cb98a1006d292a96670a11c807d0bbf9075ebd.<commit_after>
|
from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 1
|
from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 10Revert "Update timeout to 10 seconds"
This reverts commit e7cb98a1006d292a96670a11c807d0bbf9075ebd.from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 1
|
<commit_before>from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 10<commit_msg>Revert "Update timeout to 10 seconds"
This reverts commit e7cb98a1006d292a96670a11c807d0bbf9075ebd.<commit_after>from collections import OrderedDict
ACTORS = list('NRAIOVF')
FILE_COMMANDS = ['copy', 'compare']
VERBOSITY = OrderedDict(
[ ('RETURN_CODE', 0),
('RESULT' , 1),
('ERROR' , 2),
('EXECUTION' , 3),
('DEBUG' , 4),
])
VERBOSITY_DEFAULT = VERBOSITY['RESULT']
TIMEOUT_DEFAULT = 1
|
1d1e8fb72fe578adb871d22accdde60bedff48c6
|
housemarket/housesales/management/commands/fillsalesdb.py
|
housemarket/housesales/management/commands/fillsalesdb.py
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = ''
help = ('Load house sales data from a CSV and save it into DB')
def handle(self, *args, **options):
pass
|
from django.core.management.base import BaseCommand
from django.db import transaction
from housesales.models import HouseSales
import csv
from datetime import datetime
class Command(BaseCommand):
help = ('Load house sales data from a CSV and save it into DB')
def add_arguments(self, parser):
parser.add_argument('--csv', type=str)
@transaction.atomic
def handle(self, *args, **options):
if options['csv']:
file_name = options['csv']
print 'Filling house sales db with: {0}'.format(file_name)
with open(file_name, 'rU') as f:
reader = csv.reader(f)
housesales = []
for row in reader:
hs = HouseSales()
hs.transaction_id = row[0][1:len(row[0]) -1]
hs.price = int(row[1])
hs.date_of_transfer = datetime.strptime(row[2], '%Y-%m-%d %H:%M').date()
hs.postcode = row[3]
hs.property_type = row[4]
hs.old_new = row[5]
hs.duration = row[6]
hs.paon = row[7]
hs.saon = row[8]
hs.street = row[9]
hs.locality = row[10]
hs.town_city = row[11]
hs.district = row[12]
hs.county = row[13]
hs.status = row[14]
housesales.append(hs)
HouseSales.objects.bulk_create(housesales)
|
Improve performance of db import utility
|
Improve performance of db import utility
|
Python
|
mit
|
andreagrandi/sold-house-prices
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = ''
help = ('Load house sales data from a CSV and save it into DB')
def handle(self, *args, **options):
pass
Improve performance of db import utility
|
from django.core.management.base import BaseCommand
from django.db import transaction
from housesales.models import HouseSales
import csv
from datetime import datetime
class Command(BaseCommand):
help = ('Load house sales data from a CSV and save it into DB')
def add_arguments(self, parser):
parser.add_argument('--csv', type=str)
@transaction.atomic
def handle(self, *args, **options):
if options['csv']:
file_name = options['csv']
print 'Filling house sales db with: {0}'.format(file_name)
with open(file_name, 'rU') as f:
reader = csv.reader(f)
housesales = []
for row in reader:
hs = HouseSales()
hs.transaction_id = row[0][1:len(row[0]) -1]
hs.price = int(row[1])
hs.date_of_transfer = datetime.strptime(row[2], '%Y-%m-%d %H:%M').date()
hs.postcode = row[3]
hs.property_type = row[4]
hs.old_new = row[5]
hs.duration = row[6]
hs.paon = row[7]
hs.saon = row[8]
hs.street = row[9]
hs.locality = row[10]
hs.town_city = row[11]
hs.district = row[12]
hs.county = row[13]
hs.status = row[14]
housesales.append(hs)
HouseSales.objects.bulk_create(housesales)
|
<commit_before>from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = ''
help = ('Load house sales data from a CSV and save it into DB')
def handle(self, *args, **options):
pass
<commit_msg>Improve performance of db import utility<commit_after>
|
from django.core.management.base import BaseCommand
from django.db import transaction
from housesales.models import HouseSales
import csv
from datetime import datetime
class Command(BaseCommand):
help = ('Load house sales data from a CSV and save it into DB')
def add_arguments(self, parser):
parser.add_argument('--csv', type=str)
@transaction.atomic
def handle(self, *args, **options):
if options['csv']:
file_name = options['csv']
print 'Filling house sales db with: {0}'.format(file_name)
with open(file_name, 'rU') as f:
reader = csv.reader(f)
housesales = []
for row in reader:
hs = HouseSales()
hs.transaction_id = row[0][1:len(row[0]) -1]
hs.price = int(row[1])
hs.date_of_transfer = datetime.strptime(row[2], '%Y-%m-%d %H:%M').date()
hs.postcode = row[3]
hs.property_type = row[4]
hs.old_new = row[5]
hs.duration = row[6]
hs.paon = row[7]
hs.saon = row[8]
hs.street = row[9]
hs.locality = row[10]
hs.town_city = row[11]
hs.district = row[12]
hs.county = row[13]
hs.status = row[14]
housesales.append(hs)
HouseSales.objects.bulk_create(housesales)
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = ''
help = ('Load house sales data from a CSV and save it into DB')
def handle(self, *args, **options):
pass
Improve performance of db import utilityfrom django.core.management.base import BaseCommand
from django.db import transaction
from housesales.models import HouseSales
import csv
from datetime import datetime
class Command(BaseCommand):
help = ('Load house sales data from a CSV and save it into DB')
def add_arguments(self, parser):
parser.add_argument('--csv', type=str)
@transaction.atomic
def handle(self, *args, **options):
if options['csv']:
file_name = options['csv']
print 'Filling house sales db with: {0}'.format(file_name)
with open(file_name, 'rU') as f:
reader = csv.reader(f)
housesales = []
for row in reader:
hs = HouseSales()
hs.transaction_id = row[0][1:len(row[0]) -1]
hs.price = int(row[1])
hs.date_of_transfer = datetime.strptime(row[2], '%Y-%m-%d %H:%M').date()
hs.postcode = row[3]
hs.property_type = row[4]
hs.old_new = row[5]
hs.duration = row[6]
hs.paon = row[7]
hs.saon = row[8]
hs.street = row[9]
hs.locality = row[10]
hs.town_city = row[11]
hs.district = row[12]
hs.county = row[13]
hs.status = row[14]
housesales.append(hs)
HouseSales.objects.bulk_create(housesales)
|
<commit_before>from django.core.management.base import BaseCommand
class Command(BaseCommand):
args = ''
help = ('Load house sales data from a CSV and save it into DB')
def handle(self, *args, **options):
pass
<commit_msg>Improve performance of db import utility<commit_after>from django.core.management.base import BaseCommand
from django.db import transaction
from housesales.models import HouseSales
import csv
from datetime import datetime
class Command(BaseCommand):
help = ('Load house sales data from a CSV and save it into DB')
def add_arguments(self, parser):
parser.add_argument('--csv', type=str)
@transaction.atomic
def handle(self, *args, **options):
if options['csv']:
file_name = options['csv']
print 'Filling house sales db with: {0}'.format(file_name)
with open(file_name, 'rU') as f:
reader = csv.reader(f)
housesales = []
for row in reader:
hs = HouseSales()
hs.transaction_id = row[0][1:len(row[0]) -1]
hs.price = int(row[1])
hs.date_of_transfer = datetime.strptime(row[2], '%Y-%m-%d %H:%M').date()
hs.postcode = row[3]
hs.property_type = row[4]
hs.old_new = row[5]
hs.duration = row[6]
hs.paon = row[7]
hs.saon = row[8]
hs.street = row[9]
hs.locality = row[10]
hs.town_city = row[11]
hs.district = row[12]
hs.county = row[13]
hs.status = row[14]
housesales.append(hs)
HouseSales.objects.bulk_create(housesales)
|
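Two changes account for the performance claim in the record above: the whole import runs inside a single transaction via @transaction.atomic, and rows are inserted with bulk_create rather than one save() per object. A minimal sketch of the pattern; the Sale model is invented for illustration:

from django.db import models, transaction

class Sale(models.Model):            # hypothetical model, for illustration only
    price = models.IntegerField()
    postcode = models.CharField(max_length=10)

@transaction.atomic                  # one commit for the whole import
def import_rows(rows):
    Sale.objects.bulk_create(
        [Sale(price=int(r[0]), postcode=r[1]) for r in rows],
        batch_size=1000,             # one INSERT per batch instead of one per row
    )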
eccb17bb69384a5f9c95b1290600e8483487d6f7
|
django/contrib/comments/feeds.py
|
django/contrib/comments/feeds.py
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
Use correct m2m join table name in LatestCommentsFeed
|
Use correct m2m join table name in LatestCommentsFeed
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
|
Python
|
bsd-3-clause
|
aparo/django-nonrel,aparo/django-nonrel,FlaPer87/django-nonrel,FlaPer87/django-nonrel,FlaPer87/django-nonrel,aparo/django-nonrel
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
<commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
<commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
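The one-line fix above corrects the m2m join table name: Django names the table after the model that declares the ManyToManyField, so User.groups maps to auth_user_groups, not auth_users_group. Raw SQL is fragile for exactly this reason; a roughly equivalent ORM filter sidesteps the table name entirely (a sketch, not the committed code, and the NULL-user edge case may differ from the raw subquery):

# Exclude comments whose author belongs to the banned group; Django
# builds the join itself, so there is no table name to get wrong.
qs = qs.exclude(user__groups__id=settings.COMMENTS_BANNED_USERS_GROUP)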
025b356ad4bbaa81ef98467d3c3abd3c8fba98b8
|
skbio/format/sequences/tests/test_fastq.py
|
skbio/format/sequences/tests/test_fastq.py
|
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
Add tests for different types of phred offsets
|
Add tests for different types of phred offsets
|
Python
|
bsd-3-clause
|
corburn/scikit-bio,anderspitman/scikit-bio,johnchase/scikit-bio,SamStudio8/scikit-bio,anderspitman/scikit-bio,averagehat/scikit-bio,wdwvt1/scikit-bio,kdmurray91/scikit-bio,demis001/scikit-bio,SamStudio8/scikit-bio,jdrudolph/scikit-bio,gregcaporaso/scikit-bio,corburn/scikit-bio,johnchase/scikit-bio,jairideout/scikit-bio,Kleptobismol/scikit-bio,demis001/scikit-bio,Kleptobismol/scikit-bio,xguse/scikit-bio,Kleptobismol/scikit-bio,colinbrislawn/scikit-bio,gregcaporaso/scikit-bio,jensreeder/scikit-bio,Achuth17/scikit-bio,colinbrislawn/scikit-bio,xguse/scikit-bio,kdmurray91/scikit-bio,Achuth17/scikit-bio,jairideout/scikit-bio,jdrudolph/scikit-bio,averagehat/scikit-bio,jensreeder/scikit-bio,wdwvt1/scikit-bio
|
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
Add tests for different types of phred offsets
|
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
<commit_msg>Add tests for different types of phred offsets<commit_after>
|
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
Add tests for different types of phred offsets
#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def test_format_fastq_record(self):
"""Construt a FASTQ record"""
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(b'abc', b'def',
np.array([38, 39, 40], dtype=np.int8), 64)
self.assertEqual(obs, exp)
def test_phred_to_ascii33(self):
"""Write out terrible FASTQ quality scores"""
exp = b'GHI'
obs = _phred_to_ascii33(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
def test_phred_to_ascii64(self):
"""Write out terrible FASTQ quality scores"""
exp = b'fgh'
obs = _phred_to_ascii64(np.array([38, 39, 40], dtype=np.int8))
self.assertEqual(obs, exp)
if __name__ == '__main__':
main()
<commit_msg>Add tests for different types of phred offsets<commit_after>#!/usr/bin/env python
import numpy as np
from unittest import TestCase, main
from skbio.format.sequences.fastq import (format_fastq_record,
_phred_to_ascii33,
_phred_to_ascii64)
class FASTQFormatTests(TestCase):
def setUp(self):
self.qual_scores = np.array([38, 39, 40], dtype=np.int8)
self.args = (b'abc', b'def', self.qual_scores)
def test_format_fastq_record_phred_offset_33(self):
exp = b"@abc\ndef\n+\nGHI\n"
obs = format_fastq_record(*self.args, phred_offset=33)
self.assertEqual(obs, exp)
def test_format_fastq_record_phred_offset_64(self):
exp = b"@abc\ndef\n+\nfgh\n"
obs = format_fastq_record(*self.args, phred_offset=64)
self.assertEqual(obs, exp)
def test_format_fastq_record_invalid_phred_offset(self):
with self.assertRaises(ValueError):
format_fastq_record(*self.args, phred_offset=42)
def test_phred_to_ascii33(self):
obs = _phred_to_ascii33(self.qual_scores)
self.assertEqual(obs, b'GHI')
def test_phred_to_ascii64(self):
obs = _phred_to_ascii64(self.qual_scores)
self.assertEqual(obs, b'fgh')
if __name__ == '__main__':
main()
|
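
A note on the FASTQ record above: a Phred quality score q is stored as the single character chr(q + offset), where offset 33 is the Sanger/Illumina 1.8+ convention and 64 is the legacy Illumina one. The sketch below restates that mapping with an illustrative helper name; it assumes only numpy and is not skbio's API.

import numpy as np

def phred_to_ascii(scores, offset):
    # Widen before adding so large scores cannot overflow int8, then read the
    # uint8 array's buffer as the encoded quality string.
    return (scores.astype(np.int16) + offset).astype(np.uint8).tobytes()

# Reproduces the expectations asserted in the tests above.
assert phred_to_ascii(np.array([38, 39, 40], dtype=np.int8), 33) == b'GHI'
assert phred_to_ascii(np.array([38, 39, 40], dtype=np.int8), 64) == b'fgh'
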
fd2691c8971fd327bf6e4a437df6bbcfd1514bdf
|
IPython/utils/pyfile.py
|
IPython/utils/pyfile.py
|
"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in {'.pyc', '.pyo'}:
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
|
"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in ('.pyc', '.pyo'):
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
|
Fix syntax for Python 2.6 - no set literals.
|
Fix syntax for Python 2.6 - no set literals.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in {'.pyc', '.pyo'}:
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
Fix syntax for Python 2.6 - no set literals.
|
"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in ('.pyc', '.pyo'):
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
|
<commit_before>"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in {'.pyc', '.pyo'}:
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
<commit_msg>Fix syntax for Python 2.6 - no set literals.<commit_after>
|
"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in ('.pyc', '.pyo'):
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
|
"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in {'.pyc', '.pyo'}:
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
Fix syntax for Python 2.6 - no set literals."""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in ('.pyc', '.pyo'):
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
|
<commit_before>"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in {'.pyc', '.pyo'}:
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
<commit_msg>Fix syntax for Python 2.6 - no set literals.<commit_after>"""Utilities for working with Python source files.
Exposes various functions from recent Python standard libraries, along with
equivalents for older Python versions.
"""
import os.path
try: # Python 3.2
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in ('.pyc', '.pyo'):
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, ext = os.path.splitext(path)
return basename + ('.pyc' if debug_override else '.pyo')
|
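
For context on the pyfile record above: set display syntax such as {'.pyc', '.pyo'} was added in Python 2.7, so modules that must still import on 2.6 write small membership tests against a tuple instead; the `in` operator behaves identically for this purpose. A two-line illustration, assuming nothing beyond the language itself.

ext = '.pyc'
# The tuple parses on Python 2.6; a set literal here would be a SyntaxError there.
assert (ext in ('.pyc', '.pyo')) == (ext in set(['.pyc', '.pyo']))
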
89796f6cc61a2e5de18c372468ac1e91b4790085
|
test/test_get_new.py
|
test/test_get_new.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="project.tar.gz")
launch._get_new()
assert os.path.isfile("project.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("project.tar.gz.dump")
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="corrupted.tar.gz")
launch._get_new()
assert os.path.isfile("corrupted.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("corrupted.tar.gz.dump")
|
Update test for changed filename
|
Update test for changed filename
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="project.tar.gz")
launch._get_new()
assert os.path.isfile("project.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("project.tar.gz.dump")
Update test for changed filename
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="corrupted.tar.gz")
launch._get_new()
assert os.path.isfile("corrupted.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("corrupted.tar.gz.dump")
|
<commit_before>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="project.tar.gz")
launch._get_new()
assert os.path.isfile("project.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("project.tar.gz.dump")
<commit_msg>Update test for changed filename<commit_after>
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="corrupted.tar.gz")
launch._get_new()
assert os.path.isfile("corrupted.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("corrupted.tar.gz.dump")
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="project.tar.gz")
launch._get_new()
assert os.path.isfile("project.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("project.tar.gz.dump")
Update test for changed filename
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="corrupted.tar.gz")
launch._get_new()
assert os.path.isfile("corrupted.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("corrupted.tar.gz.dump")
|
<commit_before>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="project.tar.gz")
launch._get_new()
assert os.path.isfile("project.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("project.tar.gz.dump")
<commit_msg>Update test for changed filename<commit_after>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir, create_update_dir
import os
@needinternet
def test_check_get_new(fixture_update_dir, create_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing/')
launch._get_new()
with open(os.path.abspath(os.path.join(Launcher.updatedir,
"extradir/blah.py")), "r") as file_code:
file_text = file_code.read()
assert "new version" in file_text
assert os.path.isdir(Launcher.updatedir)
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package = fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/'
'_static/testing2/',
newfiles="corrupted.tar.gz")
launch._get_new()
assert os.path.isfile("corrupted.tar.gz.dump")
assert not os.path.isdir(Launcher.updatedir)
os.remove("corrupted.tar.gz.dump")
|
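
The invalid-archive test above expects the bad download to be parked as '<name>.dump' rather than extracted. A generic stdlib-only sketch of that pattern follows; the function name and error handling are illustrative, not pyautoupdate's actual implementation.

import os
import tarfile

def extract_or_dump(archive, dest):
    # Extract a readable tarball; rename a corrupt one aside as '<archive>.dump'
    # so no half-extracted files end up under dest.
    try:
        with tarfile.open(archive) as tar:
            tar.extractall(dest)
        return True
    except tarfile.ReadError:
        os.rename(archive, archive + '.dump')
        return False
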
96a5388fcb8f1164db4612f4049d41a72e81ea09
|
zerver/lib/mandrill_client.py
|
zerver/lib/mandrill_client.py
|
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT or settings.VOYAGER:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
|
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
|
Fix hardcoded check for settings.VOYAGER.
|
mandrill: Fix hardcoded check for settings.VOYAGER.
Since this delayed sending feature is the only thing
settings.MANDRILL_API_KEY is used for, it seems reasonable for that to
be the gate as to whether we actually use Mandrill.
|
Python
|
apache-2.0
|
jainayush975/zulip,tommyip/zulip,ahmadassaf/zulip,kou/zulip,SmartPeople/zulip,dawran6/zulip,sonali0901/zulip,arpith/zulip,dhcrzf/zulip,ahmadassaf/zulip,arpith/zulip,aakash-cr7/zulip,kou/zulip,sup95/zulip,sharmaeklavya2/zulip,jrowan/zulip,grave-w-grave/zulip,SmartPeople/zulip,aakash-cr7/zulip,jainayush975/zulip,eeshangarg/zulip,niftynei/zulip,krtkmj/zulip,dattatreya303/zulip,souravbadami/zulip,amanharitsh123/zulip,hackerkid/zulip,amanharitsh123/zulip,zulip/zulip,Galexrt/zulip,amyliu345/zulip,timabbott/zulip,reyha/zulip,sup95/zulip,PhilSk/zulip,Galexrt/zulip,grave-w-grave/zulip,punchagan/zulip,dattatreya303/zulip,souravbadami/zulip,mahim97/zulip,peguin40/zulip,rht/zulip,arpith/zulip,andersk/zulip,isht3/zulip,rht/zulip,Juanvulcano/zulip,blaze225/zulip,jphilipsen05/zulip,Jianchun1/zulip,sonali0901/zulip,TigorC/zulip,dhcrzf/zulip,calvinleenyc/zulip,krtkmj/zulip,reyha/zulip,souravbadami/zulip,dhcrzf/zulip,paxapy/zulip,synicalsyntax/zulip,rishig/zulip,sup95/zulip,JPJPJPOPOP/zulip,hackerkid/zulip,dhcrzf/zulip,calvinleenyc/zulip,Galexrt/zulip,Diptanshu8/zulip,verma-varsha/zulip,zacps/zulip,paxapy/zulip,timabbott/zulip,andersk/zulip,tommyip/zulip,vikas-parashar/zulip,krtkmj/zulip,timabbott/zulip,shubhamdhama/zulip,vaidap/zulip,showell/zulip,TigorC/zulip,blaze225/zulip,jainayush975/zulip,verma-varsha/zulip,vaidap/zulip,AZtheAsian/zulip,vabs22/zulip,andersk/zulip,timabbott/zulip,rishig/zulip,hackerkid/zulip,vikas-parashar/zulip,SmartPeople/zulip,j831/zulip,brainwane/zulip,grave-w-grave/zulip,Juanvulcano/zulip,jrowan/zulip,grave-w-grave/zulip,Diptanshu8/zulip,TigorC/zulip,showell/zulip,eeshangarg/zulip,zulip/zulip,kou/zulip,rishig/zulip,brockwhittaker/zulip,synicalsyntax/zulip,arpith/zulip,vabs22/zulip,susansls/zulip,samatdav/zulip,susansls/zulip,PhilSk/zulip,niftynei/zulip,christi3k/zulip,mahim97/zulip,zulip/zulip,jainayush975/zulip,JPJPJPOPOP/zulip,j831/zulip,punchagan/zulip,vikas-parashar/zulip,showell/zulip,zacps/zulip,dattatreya303/zulip,aakash-cr7/zulip,dawran6/zulip,mohsenSy/zulip,ahmadassaf/zulip,blaze225/zulip,dhcrzf/zulip,hackerkid/zulip,reyha/zulip,brainwane/zulip,TigorC/zulip,andersk/zulip,susansls/zulip,Juanvulcano/zulip,showell/zulip,sonali0901/zulip,sup95/zulip,brockwhittaker/zulip,showell/zulip,vaidap/zulip,sup95/zulip,AZtheAsian/zulip,rht/zulip,brainwane/zulip,sonali0901/zulip,Juanvulcano/zulip,isht3/zulip,eeshangarg/zulip,andersk/zulip,dhcrzf/zulip,ryanbackman/zulip,christi3k/zulip,Juanvulcano/zulip,andersk/zulip,shubhamdhama/zulip,mahim97/zulip,punchagan/zulip,KingxBanana/zulip,isht3/zulip,KingxBanana/zulip,vaidap/zulip,jackrzhang/zulip,PhilSk/zulip,brockwhittaker/zulip,tommyip/zulip,vabs22/zulip,rishig/zulip,umkay/zulip,AZtheAsian/zulip,kou/zulip,andersk/zulip,jackrzhang/zulip,showell/zulip,peguin40/zulip,vikas-parashar/zulip,krtkmj/zulip,timabbott/zulip,jphilipsen05/zulip,jrowan/zulip,brockwhittaker/zulip,ahmadassaf/zulip,sup95/zulip,dattatreya303/zulip,SmartPeople/zulip,joyhchen/zulip,JPJPJPOPOP/zulip,zacps/zulip,Galexrt/zulip,hackerkid/zulip,jackrzhang/zulip,j831/zulip,brainwane/zulip,rishig/zulip,Diptanshu8/zulip,SmartPeople/zulip,arpith/zulip,amyliu345/zulip,shubhamdhama/zulip,synicalsyntax/zulip,rishig/zulip,niftynei/zulip,Jianchun1/zulip,Jianchun1/zulip,dawran6/zulip,jphilipsen05/zulip,ryanbackman/zulip,Diptanshu8/zulip,punchagan/zulip,ryanbackman/zulip,ryanbackman/zulip,samatdav/zulip,jackrzhang/zulip,shubhamdhama/zulip,tommyip/zulip,cosmicAsymmetry/zulip,grave-w-grave/zulip,sonali0901/zulip,dawran6/zulip,brainwane/zulip,niftynei/zulip,jackrzhang/zulip,sonali0901/zulip,mahim9
7/zulip,brainwane/zulip,Juanvulcano/zulip,zacps/zulip,aakash-cr7/zulip,calvinleenyc/zulip,sharmaeklavya2/zulip,shubhamdhama/zulip,vaidap/zulip,umkay/zulip,dawran6/zulip,zulip/zulip,punchagan/zulip,dhcrzf/zulip,synicalsyntax/zulip,peguin40/zulip,Galexrt/zulip,verma-varsha/zulip,JPJPJPOPOP/zulip,tommyip/zulip,reyha/zulip,joyhchen/zulip,grave-w-grave/zulip,vikas-parashar/zulip,souravbadami/zulip,arpith/zulip,verma-varsha/zulip,synicalsyntax/zulip,cosmicAsymmetry/zulip,paxapy/zulip,blaze225/zulip,rht/zulip,susansls/zulip,j831/zulip,susansls/zulip,joyhchen/zulip,umkay/zulip,eeshangarg/zulip,umkay/zulip,vabs22/zulip,samatdav/zulip,aakash-cr7/zulip,Jianchun1/zulip,mohsenSy/zulip,rht/zulip,mahim97/zulip,calvinleenyc/zulip,vabs22/zulip,amanharitsh123/zulip,amanharitsh123/zulip,jrowan/zulip,joyhchen/zulip,paxapy/zulip,PhilSk/zulip,rishig/zulip,synicalsyntax/zulip,peguin40/zulip,vabs22/zulip,cosmicAsymmetry/zulip,zulip/zulip,mohsenSy/zulip,cosmicAsymmetry/zulip,KingxBanana/zulip,jrowan/zulip,mohsenSy/zulip,brockwhittaker/zulip,punchagan/zulip,joyhchen/zulip,AZtheAsian/zulip,tommyip/zulip,PhilSk/zulip,vikas-parashar/zulip,peguin40/zulip,jphilipsen05/zulip,ahmadassaf/zulip,samatdav/zulip,hackerkid/zulip,rht/zulip,j831/zulip,peguin40/zulip,tommyip/zulip,PhilSk/zulip,timabbott/zulip,amyliu345/zulip,jphilipsen05/zulip,shubhamdhama/zulip,ahmadassaf/zulip,souravbadami/zulip,ahmadassaf/zulip,sharmaeklavya2/zulip,jrowan/zulip,JPJPJPOPOP/zulip,amanharitsh123/zulip,AZtheAsian/zulip,zacps/zulip,umkay/zulip,kou/zulip,synicalsyntax/zulip,ryanbackman/zulip,sharmaeklavya2/zulip,verma-varsha/zulip,amyliu345/zulip,Galexrt/zulip,Jianchun1/zulip,christi3k/zulip,krtkmj/zulip,aakash-cr7/zulip,dawran6/zulip,AZtheAsian/zulip,jainayush975/zulip,Jianchun1/zulip,hackerkid/zulip,isht3/zulip,Diptanshu8/zulip,reyha/zulip,brockwhittaker/zulip,umkay/zulip,reyha/zulip,calvinleenyc/zulip,ryanbackman/zulip,sharmaeklavya2/zulip,jphilipsen05/zulip,eeshangarg/zulip,paxapy/zulip,dattatreya303/zulip,vaidap/zulip,brainwane/zulip,JPJPJPOPOP/zulip,isht3/zulip,shubhamdhama/zulip,cosmicAsymmetry/zulip,rht/zulip,niftynei/zulip,krtkmj/zulip,showell/zulip,samatdav/zulip,TigorC/zulip,mohsenSy/zulip,samatdav/zulip,verma-varsha/zulip,mohsenSy/zulip,blaze225/zulip,jackrzhang/zulip,amyliu345/zulip,kou/zulip,souravbadami/zulip,kou/zulip,KingxBanana/zulip,dattatreya303/zulip,zulip/zulip,amyliu345/zulip,christi3k/zulip,TigorC/zulip,zulip/zulip,zacps/zulip,christi3k/zulip,cosmicAsymmetry/zulip,niftynei/zulip,jainayush975/zulip,timabbott/zulip,calvinleenyc/zulip,Galexrt/zulip,blaze225/zulip,KingxBanana/zulip,krtkmj/zulip,joyhchen/zulip,jackrzhang/zulip,mahim97/zulip,SmartPeople/zulip,isht3/zulip,christi3k/zulip,j831/zulip,eeshangarg/zulip,Diptanshu8/zulip,susansls/zulip,punchagan/zulip,paxapy/zulip,sharmaeklavya2/zulip,amanharitsh123/zulip,KingxBanana/zulip,umkay/zulip,eeshangarg/zulip
|
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT or settings.VOYAGER:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
mandrill: Fix hardcoded check for settings.VOYAGER.
Since this delayed sending feature is the only thing
settings.MANDRILL_API_KEY is used for, it seems reasonable for that to
be the gate as to whether we actually use Mandrill.
|
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
|
<commit_before>import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT or settings.VOYAGER:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
<commit_msg>mandrill: Fix hardcoded check for settings.VOYAGER.
Since this delayed sending feature is the only thing
settings.MANDRILL_API_KEY is used for, it seems reasonable for that to
be the gate as to whether we actually use Mandrill.<commit_after>
|
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
|
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT or settings.VOYAGER:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
mandrill: Fix hardcoded check for settings.VOYAGER.
Since this delayed sending feature is the only thing
settings.MANDRILL_API_KEY is used for, it seems reasonable for that to
be the gate as to whether we actually use Mandrill.
import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
|
<commit_before>import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT or settings.VOYAGER:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
<commit_msg>mandrill: Fix hardcoded check for settings.VOYAGER.
Since this delayed sending feature is the only thing
settings.MANDRILL_API_KEY is used for, it seems reasonable for that to
be the gate as to whether we actually use Mandrill.<commit_after>import mandrill
from django.conf import settings
MAIL_CLIENT = None
from typing import Optional
def get_mandrill_client():
# type: () -> Optional[mandrill.Mandrill]
if settings.MANDRILL_API_KEY == '' or settings.DEVELOPMENT:
return None
global MAIL_CLIENT
if not MAIL_CLIENT:
MAIL_CLIENT = mandrill.Mandrill(settings.MANDRILL_API_KEY)
return MAIL_CLIENT
|
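
The Mandrill record above combines two small patterns: a settings gate that returns None when the feature is unconfigured or running in development, and a lazily built module-level singleton. A runnable sketch with illustrative names; the dict stands in for a real mandrill.Mandrill client.

_client = None

def get_client(api_key, is_dev):
    global _client
    if not api_key or is_dev:  # the gate: feature disabled entirely
        return None
    if _client is None:  # construct at most once, on first use
        _client = {'api_key': api_key}  # stand-in for a real client object
    return _client

assert get_client('', False) is None
assert get_client('key', False) is get_client('key', False)
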
02854d24db2418fbbd7be399d0abcb10a691810f
|
test_bert_trainer.py
|
test_bert_trainer.py
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
loss1, loss2 = results['loss'], results2['loss']
self.assertEqual(loss1, loss2)
if __name__ == '__main__':
unittest.main()
|
Fix merge conflict and also check for equal eval loss
|
Fix merge conflict and also check for equal eval loss
|
Python
|
apache-2.0
|
googleinterns/smart-news-query-embeddings,googleinterns/smart-news-query-embeddings
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
Fix merge conflict and also check for equal eval loss
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
loss1, loss2 = results['loss'], results2['loss']
self.assertEqual(loss1, loss2)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix merge conflict and also check for equal eval loss<commit_after>
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
loss1, loss2 = results['loss'], results2['loss']
self.assertEqual(loss1, loss2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
Fix merge conflict and also check for equal eval loss
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
loss1, loss2 = results['loss'], results2['loss']
self.assertEqual(loss1, loss2)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix merge conflict and also check for equal eval loss<commit_after>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
self.assertEqual(eval_acc1, eval_acc2)
loss1, loss2 = results['loss'], results2['loss']
self.assertEqual(loss1, loss2)
if __name__ == '__main__':
unittest.main()
|
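
A note on the trainer record above: assertEqual on floating-point metrics passes only when two evaluation runs are bit-for-bit identical. If runs are deterministic merely up to float noise, a tolerant comparison is the safer assertion; the helper below is a stdlib-only sketch, not a claim about this trainer's behavior.

import math

def results_match(r1, r2, rel_tol=1e-7):
    # Compare accuracy and loss within a relative tolerance instead of exactly.
    return (math.isclose(r1['eval_accuracy'], r2['eval_accuracy'], rel_tol=rel_tol)
            and math.isclose(r1['loss'], r2['loss'], rel_tol=rel_tol))

assert results_match({'eval_accuracy': 0.5, 'loss': 1.0},
                     {'eval_accuracy': 0.5, 'loss': 1.0 + 1e-9})
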
8bbd35e4efb308246961a7f4b55061be95c713f3
|
tests/common/base.py
|
tests/common/base.py
|
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
def tearDown(self):
self.dh.cleanup()
|
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
os.environ["CF_STACK"] = "cflinuxfs2"
def tearDown(self):
self.dh.cleanup()
del os.environ["CF_STACK"]
|
Set CF_STACK environment variable in compile tests
|
Set CF_STACK environment variable in compile tests
|
Python
|
apache-2.0
|
cf-identity/php-buildpack,cf-identity/php-buildpack,nsharma283/php-new,apawar2/php-buildpack,lloydbadger/test,svrc-pivotal/php-buildpack,jsloyer/php-buildpack,hjooyang/php-buildpack,svrc-pivotal/php-buildpack,jan-randis/php-db2-mysql-buildpack,svrc-pivotal/php-buildpack,ashish17das/cf-php-build,hjooyang/php-buildpack,LeonidLe/php-buildpack,cf-identity/php-buildpack,nsharma283/php-buildpack,lloydbadger/test,hpcloud/php-buildpack,chregu/cf-php-varnish-buildpack,UNINETT/php-buildpack,hpcloud/php-buildpack,mc500/php-buildpack,apawar2/php-buildpack,nsharma283/php-new,nsharma283/PCF-php-buildpack,UNINETT/php-buildpack,chregu/cf-php-varnish-buildpack,ashish17das/cf-php-build,svennam92/php-buildpack,apawar2/php-buildpack,nsharma283/php-new,ashish17das/cf-php-build,apawar2/php-buildpack,nsharma283/PCF-php-buildpack,cf-identity/php-buildpack,arjankroontolsma/post2,nsharma283/php-buildpack,aronreid/cf-php-buildpack,jan-randis/php-db2-mysql-buildpack,cloudfoundry/php-buildpack,ArthurHlt/php-buildpack,zizhongwei/php-buildpack-master,aronreid/cf-php-buildpack,chregu/cf-php-varnish-buildpack,jsloyer/php-buildpack,skeeso/php-buildpack,chregu/cf-php-varnish-buildpack,skeeso/php-buildpack,nsharma283/php-buildpack,zhangyanfa/yanfaz-php-buildpack,chregu/cf-php-varnish-buildpack,aronreid/cf-php-buildpack,arjankroontolsma/post2,zhangyanfa/yanfaz-php-buildpack,ArthurHlt/php-buildpack,mc500/php-buildpack,aronreid/cf-php-buildpack,ArthurHlt/php-buildpack,hpcloud/php-buildpack,zhangyanfa/yanfaz-php-buildpack,UNINETT/php-buildpack,mc500/php-buildpack,tgemal00/php-buildpack,mc500/php-buildpack,aronreid/cf-php-buildpack,aronreid/cf-php-buildpack,nkatre/php-buildpack,cloudfoundry/php-buildpack,arjankroontolsma/post2,zizhongwei/php-buildpack-master,arjankroontolsma/post2,nkatre/php-buildpack,jsloyer/php-buildpack,nsharma283/PCF-php-buildpack,svennam92/php-buildpack,svrc-pivotal/php-buildpack,svennam92/php-buildpack,nsharma283/php-buildpack,nsharma283/php-new,LeonidLe/php-buildpack,jan-randis/php-db2-mysql-buildpack,zizhongwei/php-buildpack-master,nkatre/php-buildpack,skeeso/php-buildpack,jsloyer/php-buildpack,cloudfoundry/php-buildpack,lloydbadger/test,skeeso/php-buildpack,lloydbadger/test,zhangyanfa/yanfaz-php-buildpack,apawar2/php-buildpack,jsloyer/php-buildpack,nkatre/php-buildpack,nsharma283/PCF-php-buildpack,zhangyanfa/yanfaz-php-buildpack,nsharma283/php-new,arjankroontolsma/post2,tgemal00/php-buildpack,Orange-OpenSource/cf-php-build-pack,LeonidLe/php-buildpack,hjooyang/php-buildpack,UNINETT/php-buildpack,svennam92/php-buildpack,nkatre/php-buildpack,chregu/cf-php-varnish-buildpack,arjankroontolsma/post2,hjooyang/php-buildpack,cf-identity/php-buildpack,ArthurHlt/php-buildpack,zizhongwei/php-buildpack-master,jan-randis/php-db2-mysql-buildpack,nsharma283/PCF-php-buildpack,skeeso/php-buildpack,ArthurHlt/php-buildpack,nkatre/php-buildpack,tgemal00/php-buildpack,tgemal00/php-buildpack,Orange-OpenSource/cf-php-build-pack,hpcloud/php-buildpack,jan-randis/php-db2-mysql-buildpack,UNINETT/php-buildpack,svrc-pivotal/php-buildpack,LeonidLe/php-buildpack,mc500/php-buildpack,UNINETT/php-buildpack,hjooyang/php-buildpack,apawar2/php-buildpack,tgemal00/php-buildpack,mc500/php-buildpack,zizhongwei/php-buildpack-master,nsharma283/php-buildpack,zizhongwei/php-buildpack-master,LeonidLe/php-buildpack,svennam92/php-buildpack,jan-randis/php-db2-mysql-buildpack,hpcloud/php-buildpack,ashish17das/cf-php-build,skeeso/php-buildpack,LeonidLe/php-buildpack,cloudfoundry/php-buildpack,ArthurHlt/php-buildpack,cf-identity/php-buildpack,Orange-OpenSour
ce/cf-php-build-pack,nsharma283/PCF-php-buildpack,hjooyang/php-buildpack,lloydbadger/test,svennam92/php-buildpack,zhangyanfa/yanfaz-php-buildpack,lloydbadger/test,hpcloud/php-buildpack,svrc-pivotal/php-buildpack,cloudfoundry/php-buildpack,cloudfoundry/php-buildpack,cloudfoundry/php-buildpack,tgemal00/php-buildpack,jsloyer/php-buildpack,cf-identity/php-buildpack,nsharma283/php-buildpack,ashish17das/cf-php-build,ashish17das/cf-php-build,nsharma283/php-new
|
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
def tearDown(self):
self.dh.cleanup()
Set CF_STACK environment variable in compile tests
|
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
os.environ["CF_STACK"] = "cflinuxfs2"
def tearDown(self):
self.dh.cleanup()
del os.environ["CF_STACK"]
|
<commit_before>import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
def tearDown(self):
self.dh.cleanup()
<commit_msg>Set CF_STACK environment variable in compile tests<commit_after>
|
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
os.environ["CF_STACK"] = "cflinuxfs2"
def tearDown(self):
self.dh.cleanup()
del os.environ["CF_STACK"]
|
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
def tearDown(self):
self.dh.cleanup()
Set CF_STACK environment variable in compile tests
import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
os.environ["CF_STACK"] = "cflinuxfs2"
def tearDown(self):
self.dh.cleanup()
del os.environ["CF_STACK"]
|
<commit_before>import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
def tearDown(self):
self.dh.cleanup()
<commit_msg>Set CF_STACK environment variable in compile tests<commit_after>import os
from build_pack_utils import BuildPack
from common.integration import DirectoryHelper
from common.integration import OptionsHelper
class BaseCompileApp(object):
def setUp(self):
self.dh = DirectoryHelper()
(self.build_dir,
self.cache_dir,
self.temp_dir) = self.dh.create_bp_env(self.app_name)
self.bp = BuildPack({
'BUILD_DIR': self.build_dir,
'CACHE_DIR': self.cache_dir,
'TMPDIR': self.temp_dir
}, '.')
if 'BP_DEBUG' in os.environ.keys():
self.bp._ctx['BP_DEBUG'] = True
self.dh.copy_build_pack_to(self.bp.bp_dir)
self.dh.register_to_delete(self.bp.bp_dir)
self.opts = OptionsHelper(os.path.join(self.bp.bp_dir,
'defaults',
'options.json'))
self.opts.set_download_url(
'http://localhost:5000/binaries/{STACK}')
os.environ["CF_STACK"] = "cflinuxfs2"
def tearDown(self):
self.dh.cleanup()
del os.environ["CF_STACK"]
|
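Aside: the setUp/tearDown pair above mutates os.environ directly, which discards any CF_STACK value that was already set instead of restoring it. A minimal sketch of the same isolation using unittest.mock.patch.dict (illustrative only, not part of the buildpack repo):

import os
import unittest
from unittest import mock

class StackEnvTest(unittest.TestCase):
    def setUp(self):
        # patch.dict snapshots os.environ and restores it on stop(), so a
        # pre-existing CF_STACK survives the test run.
        patcher = mock.patch.dict(os.environ, {'CF_STACK': 'cflinuxfs2'})
        patcher.start()
        self.addCleanup(patcher.stop)

    def test_stack_is_visible(self):
        self.assertEqual(os.environ['CF_STACK'], 'cflinuxfs2')

if __name__ == '__main__':
    unittest.main()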
f1d9c010b58d69cdcf8f55a3e5937cbdf58c10e6
|
tools/corintick_dump.py
|
tools/corintick_dump.py
|
#!/usr/bin/env python
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
#!/usr/bin/env python
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
Fix help docstring and glob parsing
|
Fix help docstring and glob parsing
|
Python
|
mit
|
plugaai/pytrthree
|
#!/usr/bin/env python
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
Fix help docstring and glob parsing
|
#!/usr/bin/env python
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
<commit_before>#!/usr/bin/env python
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
<commit_msg>Fix help docstring and glob parsing<commit_after>
|
#!/usr/bin/env python
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
#!/usr/bin/env python
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
Fix help docstring and glob parsing#!/usr/bin/env python
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
<commit_before>#!/usr/bin/env python
import argparse
import glob
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(args.files)
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to download')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
<commit_msg>Fix help docstring and glob parsing<commit_after>#!/usr/bin/env python
import argparse
import glob
import os
from pytrthree import TRTHIterator
from corintick import Corintick, ValidationError
def main(args):
db = Corintick(args.config)
files = glob.glob(os.path.expanduser(args.files))
for ric, df in TRTHIterator(files):
cols = args.columns if args.columns else df.columns
try:
db.write(ric, df[cols], collection=args.collection)
except ValidationError as e:
db.logger.error(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse TRTH files and insert into Corintick.')
parser.add_argument('--config', type=argparse.FileType('r'), required=True,
help='Corintick configuration (YAML file)')
parser.add_argument('--files', type=str, default='*', required=True,
help='Glob of files to insert')
parser.add_argument('--columns', nargs='*', type=str,
help='Columns to be inserted (optional)')
parser.add_argument('--collection', type=str, default=None,
help='Collection to insert to (optional)')
args = parser.parse_args()
main(args)
|
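Aside: the fix above matters because glob.glob performs no tilde expansion of its own, so a "~/..." pattern matches nothing until expanduser rewrites it. A standalone check (the pattern is hypothetical, not from the repo):

import glob
import os

pattern = '~/trth/*.csv.gz'  # hypothetical pattern
print(glob.glob(pattern))                      # [] -- the "~" is taken literally
print(glob.glob(os.path.expanduser(pattern)))  # matches files under the home dir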
c111410ad8feb6347e1e493c19b32ff9e8230306
|
zmon_aws_agent/common.py
|
zmon_aws_agent/common.py
|
import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling':
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
|
import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling' or \
'RequestLimitExceeded' in str(e):
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
|
Handle RequestLimitExceeded the same way as Throttling response
|
Handle RequestLimitExceeded the same way as Throttling response
|
Python
|
apache-2.0
|
zalando/zmon-aws-agent,zalando/zmon-aws-agent
|
import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling':
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
Handle RequestLimitExceeded the same way as Throttling response
|
import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling' or \
'RequestLimitExceeded' in str(e):
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
|
<commit_before>import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling':
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
<commit_msg>Handle RequestLimitExceeded the same way as Throttling response<commit_after>
|
import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling' or \
'RequestLimitExceeded' in str(e):
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
|
import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling':
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
Handle RequestLimitExceeded the same way as Throttling responseimport time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling' or \
'RequestLimitExceeded' in str(e):
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
|
<commit_before>import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling':
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
<commit_msg>Handle RequestLimitExceeded the same way as Throttling response<commit_after>import time
import logging
from botocore.exceptions import ClientError
from zmon_aws_agent import __version__
MAX_RETRIES = 10
TIME_OUT = 0.5
logger = logging.getLogger(__name__)
def get_user_agent():
return 'zmon-aws-agent/{}'.format(__version__)
def get_sleep_duration(retries):
return 2 ** retries * TIME_OUT
def call_and_retry(fn, *args, **kwargs):
"""Call `fn` and retry in case of API Throttling exception."""
count = 0
while True:
try:
return fn(*args, **kwargs)
except ClientError as e:
if e.response['Error']['Code'] == 'Throttling' or \
'RequestLimitExceeded' in str(e):
if count < MAX_RETRIES:
logger.info('Throttling AWS API requests...')
time.sleep(get_sleep_duration(count))
count += 1
continue
raise
|
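Aside: with TIME_OUT = 0.5, get_sleep_duration yields an exponential backoff schedule of 0.5, 1, 2, 4, ... seconds across retries. A standalone sketch of that schedule (constant copied from the module above; the usage in the final comment is purely illustrative):

TIME_OUT = 0.5

def get_sleep_duration(retries):
    # the wait doubles on every retry: 2**0 * 0.5, 2**1 * 0.5, ...
    return 2 ** retries * TIME_OUT

print([get_sleep_duration(n) for n in range(5)])  # [0.5, 1.0, 2.0, 4.0, 8.0]
# hypothetical usage: call_and_retry(ec2_client.describe_instances)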
9adb52b4a3295afcaaa4c830835d42ce0bbbb03e
|
udemy/missingelement.py
|
udemy/missingelement.py
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
Add XOR approach for finding missing element
|
Add XOR approach for finding missing element
Add approach for finding the missing element in the second list by performing a series of XOR operations.
|
Python
|
mit
|
chinhtle/python_fun
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
Add XOR approach for finding missing element
Add approach for finding the missing element in the second list by performing a series of XOR operations.
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
<commit_before>import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
<commit_msg>Add XOR approach for finding missing element
Add approach for finding the missing element in the second list by performing a series of XOR operations.<commit_after>
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
Add XOR approach for finding missing element
Add approach for finding the missing element in the second list by performing a series of XOR operations.import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
<commit_before>import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
<commit_msg>Add XOR approach for finding missing element
Add approach for finding the missing element in the second list by performing a series of XOR operations.<commit_after>import collections
# Problem:
# Consider an array of non-negative integers. A second array is formed
# by shuffling the elements of the first array and deleting a random element.
# Given these two arrays, find which element is missing in the second array.
#
# Assume there will always be one missing element in the second list.
#
# Example:
# The first array is shuffled and the number 5 is removed to construct the
# second array.
#
# Input:
# finder([1,2,3,4,5,6,7], [3,7,2,1,4,6])
#
# Output:
# 5
def finder(l1, l2):
"""
Find the missing element using the sum of two lists. Need to be careful
of overflows. Using the built-in sum function for this.
"""
return sum(l1) - sum(l2)
def finder2(l1, l2):
"""
Find the missing element in a non-python specific approach.
"""
count = collections.defaultdict(int)
for num in l2:
count[num] += 1
for num in l1:
if count[num] == 0:
return num
else:
count[num] -= 1
return None
def finder3(l1, l2):
"""
Find the missing element in a non-python specific approach in constant
space complexity.
"""
result = 0
for num in l1 + l2:
result ^= num
return result
|
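Aside: finder3 works because XOR is commutative and self-cancelling (x ^ x == 0 and x ^ 0 == x), so every value present in both lists cancels and only the missing element survives. Strictly, l1 + l2 materialises a combined list; iterating itertools.chain(l1, l2) would make the walk truly constant-space. A standalone check (function restated verbatim from the module):

def finder3(l1, l2):
    result = 0
    for num in l1 + l2:
        result ^= num
    return result

assert finder3([1, 2, 3, 4, 5, 6, 7], [3, 7, 2, 1, 4, 6]) == 5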
30aa7dce0561e1fd8beeec94098a5d6a6f447a65
|
src/test.py
|
src/test.py
|
#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
print "Real coefficients:", koeffs
print "Fitted coefficients:", fit
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
|
Print real and fitted coeffs
|
Print real and fitted coeffs
|
Python
|
mit
|
bbci/playground
|
#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
Print real and fitted coeffs
|
#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
print "Real coefficients:", koeffs
print "Fitted coefficients:", fit
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
<commit_msg>Print real and fitted coeffs<commit_after>
|
#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
print "Real coefficients:", koeffs
print "Fitted coefficients:", fit
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
Print real and fitted coeffs#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
print "Real coefficients:", koeffs
print "Fitted coefficients:", fit
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
<commit_msg>Print real and fitted coeffs<commit_after>#!/usr/bin/env python
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
def main():
koeffs = [.3, 1.2, .1, 7]
p = np.poly1d(koeffs)
x = np.linspace(-2, 2, 100)
y = p(x) + 2 * np.random.randn(100) - 1
# fit
fit = np.polyfit(x, y, 3)
p_fit = np.poly1d(fit)
print "Real coefficients:", koeffs
print "Fitted coefficients:", fit
# plot
plt.scatter(x, y)
plt.plot(x, p_fit(x))
plt.show()
if __name__ == '__main__':
main()
|
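Aside: the bare print statements above are Python 2 syntax, consistent with the repo's vintage. A Python 3 sketch of the same diagnostic, seeded for reproducibility (values are illustrative):

import numpy as np

rng = np.random.default_rng(0)
koeffs = [0.3, 1.2, 0.1, 7]
x = np.linspace(-2, 2, 100)
y = np.poly1d(koeffs)(x) + 2 * rng.standard_normal(100) - 1
fit = np.polyfit(x, y, 3)  # coefficients come back highest degree first
print('Real coefficients:', koeffs)
print('Fitted coefficients:', np.round(fit, 3))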
efed9e50dccea80cb536f106044265f8f1e2a32b
|
models.py
|
models.py
|
import peewee
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
|
import os
import peewee
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
if __name__ == '__main__':
if 'HEROKU' in os.environ:
import urlparse
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
db = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port)
else:
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
setup_database()
|
Set up database according to environment
|
Set up database according to environment
|
Python
|
mit
|
karen/ivle-bot,karenang/ivle-bot
|
import peewee
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
Set up database according to environment
|
import os
import peewee
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
if __name__ == '__main__':
if 'HEROKU' in os.environ:
import urlparse
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
db = PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port)
else:
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
setup_database()
|
<commit_before>import peewee
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
<commit_msg>Set up database according to environment<commit_after>
|
import os
import peewee
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
if __name__ == '__main__':
if 'HEROKU' in os.environ:
import urlparse
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
db = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port)
else:
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
setup_database()
|
import peewee
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
Set up database according to environmentimport os
import peewee
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
if __name__ == '__main__':
if 'HEROKU' in os.environ:
import urlparse
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
db = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port)
else:
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
setup_database()
|
<commit_before>import peewee
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
<commit_msg>Set up database according to environment<commit_after>import os
import peewee
class IBModel(peewee.Model):
class Meta:
database = db
class User(IBModel):
user_id = peewee.CharField(max_length=128, primary_key=True)
auth_token = peewee.TextField()
class Module(IBModel):
module_id = peewee.TextField()
module_code = peewee.CharField(max_length=16)
acad_year = peewee.CharField(max_length=16)
semester = peewee.IntegerField()
class Meta:
primary_key = peewee.CompositeKey('module_code', 'acad_year', 'semester')
def setup_database():
db.connect()
try:
db.create_tables([User, Module])
except peewee.OperationalError as e:
print(e)
if __name__ == '__main__':
if 'HEROKU' in os.environ:
import urlparse
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
db = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port)
else:
db = peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')
setup_database()
|
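Aside: even with the missing peewee. prefix restored, the committed module still fails at import time -- IBModel.Meta references db before either branch assigns it, and urlparse is the Python 2 module name. A Python 3 sketch of the intended selection logic, restructured so db exists before the models are declared (keying off DATABASE_URL directly is a simplification of the HEROKU check above):

import os
from urllib.parse import urlparse, uses_netloc

import peewee

uses_netloc.append('postgres')

def database_from_env():
    # Heroku injects DATABASE_URL; otherwise fall back to the local test DB.
    if 'DATABASE_URL' in os.environ:
        url = urlparse(os.environ['DATABASE_URL'])
        return peewee.PostgresqlDatabase(
            database=url.path[1:], user=url.username, password=url.password,
            host=url.hostname, port=url.port)
    return peewee.PostgresqlDatabase('ivle_bot_test', user='postgres')

db = database_from_env()  # define db before any Model.Meta references it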
32587292baab9ed1d994fc1643d4bc004832a575
|
viper/parser/grammar.py
|
viper/parser/grammar.py
|
from .languages import SPPF, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> SPPF:
lang = self.rules[rule]
return make_sppf(lang, lexemes)
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
|
from .ast import ASTNode
from .languages import ParseTreeChar, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> ASTNode:
lang = self.rules[rule]
sppf = make_sppf(lang, lexemes)
if len(sppf) == 0:
raise RuntimeError("Invalid parse.")
elif len(sppf) == 1:
child = sppf[0]
if not isinstance(child, ParseTreeChar):
raise RuntimeError(f"Invalid parse result: {child}")
result = child.token
if not isinstance(result, ASTNode):
raise RuntimeError(f"Invalid parse result: {result}")
return result
else:
raise RuntimeError("Ambiguous parse.")
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
|
Revise top-level parse function to return ASTNode
|
Revise top-level parse function to return ASTNode
|
Python
|
apache-2.0
|
pdarragh/Viper
|
from .languages import SPPF, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> SPPF:
lang = self.rules[rule]
return make_sppf(lang, lexemes)
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
Revise top-level parse function to return ASTNode
|
from .ast import ASTNode
from .languages import ParseTreeChar, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> ASTNode:
lang = self.rules[rule]
sppf = make_sppf(lang, lexemes)
if len(sppf) == 0:
raise RuntimeError("Invalid parse.")
elif len(sppf) == 1:
child = sppf[0]
if not isinstance(child, ParseTreeChar):
raise RuntimeError(f"Invalid parse result: {child}")
result = child.token
if not isinstance(result, ASTNode):
raise RuntimeError(f"Invalid parse result: {result}")
return result
else:
raise RuntimeError("Ambiguous parse.")
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
|
<commit_before>from .languages import SPPF, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> SPPF:
lang = self.rules[rule]
return make_sppf(lang, lexemes)
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
<commit_msg>Revise top-level parse function to return ASTNode<commit_after>
|
from .ast import ASTNode
from .languages import ParseTreeChar, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> ASTNode:
lang = self.rules[rule]
sppf = make_sppf(lang, lexemes)
if len(sppf) == 0:
raise RuntimeError("Invalid parse.")
elif len(sppf) == 1:
child = sppf[0]
if not isinstance(child, ParseTreeChar):
raise RuntimeError(f"Invalid parse result: {child}")
result = child.token
if not isinstance(result, ASTNode):
raise RuntimeError(f"Invalid parse result: {result}")
return result
else:
raise RuntimeError("Ambiguous parse.")
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
|
from .languages import SPPF, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> SPPF:
lang = self.rules[rule]
return make_sppf(lang, lexemes)
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
Revise top-level parse function to return ASTNodefrom .ast import ASTNode
from .languages import ParseTreeChar, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> ASTNode:
lang = self.rules[rule]
sppf = make_sppf(lang, lexemes)
if len(sppf) == 0:
raise RuntimeError("Invalid parse.")
elif len(sppf) == 1:
child = sppf[0]
if not isinstance(child, ParseTreeChar):
raise RuntimeError(f"Invalid parse result: {child}")
result = child.token
if not isinstance(result, ASTNode):
raise RuntimeError(f"Invalid parse result: {result}")
return result
else:
raise RuntimeError("Ambiguous parse.")
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
|
<commit_before>from .languages import SPPF, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> SPPF:
lang = self.rules[rule]
return make_sppf(lang, lexemes)
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
<commit_msg>Revise top-level parse function to return ASTNode<commit_after>from .ast import ASTNode
from .languages import ParseTreeChar, make_sppf
from .linguify_grammar import linguify_grammar_file
from viper.lexer import Lexeme
from os.path import join, dirname
from typing import List
class Grammar:
def __init__(self, grammar_filename: str):
self.file = grammar_filename
self.rules = linguify_grammar_file(self.file)
def parse_rule(self, rule: str, lexemes: List[Lexeme]) -> ASTNode:
lang = self.rules[rule]
sppf = make_sppf(lang, lexemes)
if len(sppf) == 0:
raise RuntimeError("Invalid parse.")
elif len(sppf) == 1:
child = sppf[0]
if not isinstance(child, ParseTreeChar):
raise RuntimeError(f"Invalid parse result: {child}")
result = child.token
if not isinstance(result, ASTNode):
raise RuntimeError(f"Invalid parse result: {result}")
return result
else:
raise RuntimeError("Ambiguous parse.")
GRAMMAR_FILE = join(dirname(__file__), join('grammar_parsing', 'formal_grammar.bnf'))
GRAMMAR = Grammar(GRAMMAR_FILE)
|
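Aside: the parse_rule revision above implements a general "demand exactly one result" pattern -- zero parses is an error, more than one means ambiguity. A standalone sketch of that shape (names illustrative, not from the viper repo):

def unwrap_single(results, what='parse'):
    if len(results) == 0:
        raise RuntimeError(f'Invalid {what}.')
    if len(results) > 1:
        raise RuntimeError(f'Ambiguous {what}.')
    return results[0]

print(unwrap_single(['lone-ast-node']))  # lone-ast-node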
bef1e44e027284e193be889b5ca273c906ae8325
|
snippets/__main__.py
|
snippets/__main__.py
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
Make repository source optional in cli
|
Make repository source optional in cli
|
Python
|
isc
|
trilan/snippets,trilan/snippets
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
Make repository source optional in cli
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
<commit_before>import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
<commit_msg>Make repository source optional in cli<commit_after>
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
Make repository source optional in cliimport argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
<commit_before>import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('path')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.path)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
<commit_msg>Make repository source optional in cli<commit_after>import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
|
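Aside: the change above turns a required positional into an optional flag with a default, so the tool now runs with no arguments at all. A minimal standalone check of that argparse behaviour:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
print(parser.parse_args([]).source)              # snippets -- default applies
print(parser.parse_args(['-s', 'docs']).source)  # docs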
8f03f4fc5b4b321303225ec60879eb4b6a2c14f5
|
cli/cli.py
|
cli/cli.py
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'],help='List anayltics commands based on choice')
parser.parse_args()
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
# A run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('run_commands', help='Run analytics based on argument', nargs='?', default='basic')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
Add subparser for run analytics commands
|
Add subparser for run analytics commands
|
Python
|
mit
|
McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
parser.parse_args()
Add subparser for run analytics commands
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
# A run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('run_commands', help='Run analytics based on argument', nargs='?', default='basic')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
<commit_before>import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
parser.parse_args()
<commit_msg>Add subparser for run analytics commands<commit_after>
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
# A run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('run_commands', help='Run analytics based on argument', nargs='?', default='basic')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
parser.parse_args()
Add subparser for run analytics commandsimport argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
# A run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('run_commands', help='Run analytics based on argument', nargs='?', default='basic')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
<commit_before>import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
parser.parse_args()
<commit_msg>Add subparser for run analytics commands<commit_after>import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Analysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
subparsers = parser.add_subparsers(help='commands')
# A list command
list_parser = subparsers.add_parser('list', help='List commands')
list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'], help='List analytics commands based on choice')
# A run command to execute the analysis
run_parser = subparsers.add_parser('run', help='Run commands')
run_parser.add_argument('run_commands', help='Run analytics based on argument', nargs='?', default='basic')
if __name__ == '__main__':
args = parser.parse_args()
print args
|
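The diff above wires a second subcommand into the same subparsers object. A self-contained sketch of the pattern (prog name and argument names here are illustrative, not the moocx CLI):

import argparse

parser = argparse.ArgumentParser(prog='demo')
subparsers = parser.add_subparsers(dest='command')

subparsers.add_parser('list')
run_parser = subparsers.add_parser('run')
# nargs='?' with a default mirrors the diff: `demo run` alone means `demo run basic`.
run_parser.add_argument('target', nargs='?', default='basic')

args = parser.parse_args(['run'])
print(args.command, args.target)  # run basic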
1101fd3855c90ece679e4b9af37c5f3f5dc343eb
|
spacy/en/__init__.py
|
spacy/en/__init__.py
|
# coding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
|
# coding: utf8
from __future__ import unicode_literals
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
|
Fix formatting and remove unused imports
|
Fix formatting and remove unused imports
|
Python
|
mit
|
recognai/spaCy,raphael0202/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,raphael0202/spaCy,raphael0202/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,explosion/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,raphael0202/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy
|
# coding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
Fix formatting and remove unused imports
|
# coding: utf8
from __future__ import unicode_literals
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
|
<commit_before># coding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
<commit_msg>Fix formatting and remove unused imports<commit_after>
|
# coding: utf8
from __future__ import unicode_literals
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
|
# coding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
Fix formatting and remove unused imports# coding: utf8
from __future__ import unicode_literals
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
|
<commit_before># coding: utf8
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
<commit_msg>Fix formatting and remove unused imports<commit_after># coding: utf8
from __future__ import unicode_literals
from ..language import Language
from ..lemmatizer import Lemmatizer
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..attrs import LANG
from ..deprecated import fix_glove_vectors_loading
from .language_data import *
try:
basestring
except NameError:
basestring = str
class English(Language):
lang = 'en'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'en'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
tag_map = TAG_MAP
stop_words = STOP_WORDS
def __init__(self, **overrides):
# Special-case hack for loading the GloVe vectors, to support <1.0
overrides = fix_glove_vectors_loading(overrides)
Language.__init__(self, **overrides)
|
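The diff above just drops print_function and os.path by hand. For larger modules, a rough stdlib-only way to spot candidates is to diff imported names against referenced names (a heuristic sketch, not the tool the spaCy team uses — it ignores attribute access, __all__, and re-exports):

import ast

source = "import os\nimport sys\nprint(sys.argv)\n"
tree = ast.parse(source)

# Names bound by import statements at any level of the module.
imported = {alias.asname or alias.name.split('.')[0]
            for node in ast.walk(tree)
            if isinstance(node, (ast.Import, ast.ImportFrom))
            for alias in node.names}
# Bare names actually referenced somewhere in the module.
used = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}

print(imported - used)  # {'os'} -- imported but never referenced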
572207d26c51038b679832b24b2e8381209e6f87
|
collect.py
|
collect.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
firstrun = False
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
if firstrun:
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
|
Make subsequent runs only update resources
|
Make subsequent runs only update resources
|
Python
|
mit
|
mcarans/hdxscraper-acled-africa,mcarans/hdxscraper-acled-africa
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
Make subsequent runs only update resources
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
firstrun = False
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
if firstrun:
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
<commit_msg>Make subsequent runs only update resources<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
firstrun = False
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
if firstrun:
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
Make subsequent runs only update resources#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
firstrun = False
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
if firstrun:
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
<commit_msg>Make subsequent runs only update resources<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
from collector.acled_africa import generate_urls
from collector.parser import parse
from collector.register import create_datasets, create_resources, create_gallery_items
def main():
'''
Wrapper.
'''
firstrun = False
server = 'http://test-data.hdx.rwlabs.org'
objects = generate_urls()
parsed_data = parse(objects)
if firstrun:
create_datasets(datasets=parsed_data['datasets'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_gallery_items(gallery_items=parsed_data['gallery_items'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
create_resources(resources=parsed_data['resources'],
hdx_site=server, apikey=os.getenv('HDX_KEY'))
if __name__ == '__main__':
main()
|
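The hardcoded `firstrun = False` above means re-running the initial registration requires editing the source. One hedged alternative (the flag name and print statements are hypothetical, not part of this scraper) is to expose the switch on the command line:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--first-run', action='store_true',
                    help='also create datasets and gallery items')
args = parser.parse_args(['--first-run'])

# Gate the one-time registration on the flag instead of a constant.
if args.first_run:
    print('creating datasets and gallery items')
print('updating resources')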
1ef70820acb57c54f1212777e60b32db9b47c8a5
|
examples/python/test_axis_precision.py
|
examples/python/test_axis_precision.py
|
#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
plsyax(10000, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
|
#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
# Choose 5 here so there is room for non-exponential notation for an axis
# label of +0.08.
plsyax(5, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
|
Use less ridiculous value of ydigmax specified via plsyax. This works well (i.e., gives non-exponential notation for the Y axis) with the recent pldprec change in pldtik.c which removes the ceiling on digfix and simply sets it to digmax.
|
Use less ridiculous value of ydigmax specified via plsyax. This works
well (i.e., gives non-exponential notation for the Y axis) with the
recent pldprec change in pldtik.c which removes the ceiling on digfix and
simply sets it to digmax.
svn path=/trunk/; revision=10608
|
Python
|
lgpl-2.1
|
FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot,FreeScienceCommunity/PLPlot
|
#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
plsyax(10000, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
Use less ridiculous value of ydigmax specified via plsyax. This works
well (i.e., gives non-exponential notation for the Y axis) with the
recent pldprec change in pldtik.c which removes the ceiling on digfix and
simply sets it to digmax.
svn path=/trunk/; revision=10608
|
#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
# Choose 5 here so there is room for non-exponential notation for an axis
# label of +0.08.
plsyax(5, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
|
<commit_before>#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
plsyax(10000, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
<commit_msg>Use less ridiculous value of ydigmax specified via plsyax. This works
well (i.e., gives non-exponential notation for the Y axis) with the
recent pldprec change in pldtik.c which removes the ceiling on digfix and
simply sets it to digmax.
svn path=/trunk/; revision=10608<commit_after>
|
#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
# Choose 5 here so there is room for non-exponential notation for an axis
# label of +0.08.
plsyax(5, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
|
#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
plsyax(10000, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
Use less ridiculous value of ydigmax specified via plsyax. This works
well (i.e., gives non-exponential notation for the Y axis) with the
recent pldprec change in pldtik.c which removes the ceiling on digfix and
simply sets it to digmax.
svn path=/trunk/; revision=10608#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
# Choose 5 here so there is room for non-exponential notation for an axis
# label of +0.08.
plsyax(5, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
|
<commit_before>#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
plsyax(10000, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
<commit_msg>Use less ridiculous value of ydigmax specified via plsyax. This works
well (i.e., gives non-exponential notation for the Y axis) with the
recent pldprec change in pldtik.c which removes the ceiling on digfix and
simply sets it to digmax.
svn path=/trunk/; revision=10608<commit_after>#!/usr/bin/env python
# Append to effective python path so that can find plplot modules.
from plplot_python_start import *
import sys
from plplot import *
from numpy import *
# Parse and process command line arguments
plparseopts(sys.argv, PL_PARSE_FULL)
# Initialize plplot
plinit()
# Choose 5 here so there is room for non-exponential notation for an axis
# label of +0.08.
plsyax(5, 0)
pladv(0)
plvpor(0.1, 0.9, 0.1, 0.9)
plwind(0.00, 1.00, 0.00, 0.08)
plbox("bcnst", 0.0, 0, "bcnstv", 0.0, 0);
plend()
|
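A toy illustration of the digmax idea (an assumption about the intent, not pldtik.c's actual algorithm): with a small significant-digit budget, a label like 0.08 still fits fixed-point form, so no exponential fallback is needed.

def axis_label(value, digmax):
    # Pretend rule: fall back to exponential only when fixed-point
    # formatting within digmax significant digits would itself need one.
    fixed = f"{value:.{digmax}g}"
    return fixed if 'e' not in fixed else f"{value:e}"

print(axis_label(0.08, 5))   # 0.08 -- fits, stays non-exponential
print(axis_label(8e-07, 5))  # 8.000000e-07 -- too small, falls back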
e144820c974548a549d0428a3b439fc0688bd2b2
|
tests/test_pathutils.py
|
tests/test_pathutils.py
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch('libsass.pathutils.os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
Use patch.object for python 2 compat
|
Use patch.object for python 2 compat
|
Python
|
mit
|
blitzrk/sublime_libsass,blitzrk/sublime_libsass
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch('libsass.pathutils.os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Use patch.object for python 2 compat
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
<commit_before>from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch('libsass.pathutils.os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
<commit_msg>Use patch.object for python 2 compat<commit_after>
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch('libsass.pathutils.os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Use patch.object for python 2 compatfrom os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
<commit_before>from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch('libsass.pathutils.os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
<commit_msg>Use patch.object for python 2 compat<commit_after>from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
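The switch to patch.object matters because the plugin can be imported either as `libsass` or as `sublime_libsass.libsass`, so a hardcoded string target only matches one of the two. A self-contained sketch of the mechanism (the module here is a stand-in built on the fly, not the real pathutils):

import os
import types
from unittest import mock

# Fake module that reaches os.walk through its own attribute, like pathutils does.
pathutils = types.ModuleType('pathutils')
pathutils.os = os

def find_dirs(root):
    return [d for d, _, files in pathutils.os.walk(root) if files]

# patch.object targets the module object we already hold, so the import
# path it happens to live under is irrelevant.
with mock.patch.object(pathutils, 'os') as mock_os:
    mock_os.walk = lambda x: [('/tmp', '', ['file.scss'])]
    print(find_dirs('anything'))  # ['/tmp']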
894d62b6e32e3433f77ad01d41efa3e4bc81f13c
|
tempest/services/volume/json/admin/volume_hosts_client.py
|
tempest/services/volume/json/admin/volume_hosts_client.py
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, params=None):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, **params):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
|
Make argument params of list methods consistent
|
Make argument params of list methods consistent
The argument type "params" is not consistent between list methods of
compute service clients. This patch makes them consistent.
Partially implements blueprint consistent-service-method-names
Change-Id: I9c7c3034b5273de5adb87b6623b3615689a9b2d0
|
Python
|
apache-2.0
|
Juniper/tempest,bigswitch/tempest,LIS/lis-tempest,Juniper/tempest,Tesora/tesora-tempest,vedujoshi/tempest,zsoltdudas/lis-tempest,cisco-openstack/tempest,vedujoshi/tempest,sebrandon1/tempest,zsoltdudas/lis-tempest,sebrandon1/tempest,LIS/lis-tempest,openstack/tempest,Tesora/tesora-tempest,openstack/tempest,cisco-openstack/tempest,masayukig/tempest,bigswitch/tempest,masayukig/tempest
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, params=None):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
Make argument params of list methods consistent
The argument type "params" is not consistent between list methods of
compute service clients. This patch makes them consistent.
Partially implements blueprint consistent-service-method-names
Change-Id: I9c7c3034b5273de5adb87b6623b3615689a9b2d0
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, **params):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
|
<commit_before># Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, params=None):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
<commit_msg>Make argument params of list methods consistent
The argument type "params" is not consistent between list methods of
compute service clients. This patch makes them consistent.
Partially implements blueprint consistent-service-method-names
Change-Id: I9c7c3034b5273de5adb87b6623b3615689a9b2d0<commit_after>
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, **params):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, params=None):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
Make argument params of list methods consistent
The argument type "params" is not consistent between list methods of
compute service clients. This patch makes them consistent.
Partially implements blueprint consistent-service-method-names
Change-Id: I9c7c3034b5273de5adb87b6623b3615689a9b2d0# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, **params):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
|
<commit_before># Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, params=None):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
<commit_msg>Make argument params of list methods consistent
The argument type "params" is not consistent between list methods of
compute service clients. This patch makes them consistent.
Partially implements blueprint consistent-service-method-names
Change-Id: I9c7c3034b5273de5adb87b6623b3615689a9b2d0<commit_after># Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.common import service_client
class BaseVolumeHostsClient(service_client.ServiceClient):
"""Client class to send CRUD Volume Hosts API requests"""
def list_hosts(self, **params):
"""Lists all hosts."""
url = 'os-hosts'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.expected_success(200, resp.status)
return service_client.ResponseBody(resp, body)
class VolumeHostsClient(BaseVolumeHostsClient):
"""Client class to send CRUD Volume Host API V1 requests"""
|
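The visible effect of `params=None` versus `**params` is at the call site: the former takes a single dict argument, the latter takes keyword arguments directly. A toy reduction of the client method (not the tempest client itself; Python 3 urllib is used instead of six):

from urllib.parse import urlencode

def list_hosts(**params):
    url = 'os-hosts'
    if params:
        url += '?%s' % urlencode(params)
    return url

print(list_hosts())                      # os-hosts
print(list_hosts(zone='nova', limit=5))  # os-hosts?zone=nova&limit=5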
595dfa67764a525bcff864e1ddc513496f1376df
|
microcosm_postgres/temporary/copy.py
|
microcosm_postgres/temporary/copy.py
|
"""
Copy a table.
"""
from sqlalchemy import Table
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
column.copy(schema=schema)
for column in from_table.columns
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
|
"""
Copy a table.
"""
from sqlalchemy import Table
from microcosm_postgres.types import Serial
def copy_column(column, schema):
"""
Safely create a copy of a column.
"""
return column.copy(schema=schema)
def should_copy(column):
"""
Determine if a column should be copied.
"""
if not isinstance(column.type, Serial):
return True
if column.nullable:
return True
if not column.server_default:
return True
# do not create temporary serial values; they will be defaulted on upsert/insert
return False
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
copy_column(column, schema)
for column in from_table.columns
if should_copy(column)
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
|
Handle serial values on temporary table creation
|
Handle serial values on temporary table creation
Do not copy serial columns because they will be generated automatically
if and only if they are omitted from the insert().select_from().
|
Python
|
apache-2.0
|
globality-corp/microcosm-postgres,globality-corp/microcosm-postgres
|
"""
Copy a table.
"""
from sqlalchemy import Table
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
column.copy(schema=schema)
for column in from_table.columns
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
Handle serial values on temporary table creation
Do not copy serial columns because they will be generated automatically
if and only if they are omitted from the insert().select_from().
|
"""
Copy a table.
"""
from sqlalchemy import Table
from microcosm_postgres.types import Serial
def copy_column(column, schema):
"""
Safely create a copy of a column.
"""
return column.copy(schema=schema)
def should_copy(column):
"""
Determine if a column should be copied.
"""
if not isinstance(column.type, Serial):
return True
if column.nullable:
return True
if not column.server_default:
return True
# do not create temporary serial values; they will be defaulted on upsert/insert
return False
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
copy_column(column, schema)
for column in from_table.columns
if should_copy(column)
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
|
<commit_before>"""
Copy a table.
"""
from sqlalchemy import Table
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
column.copy(schema=schema)
for column in from_table.columns
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
<commit_msg>Handle serial values on temporary table creation
Do not copy serial columns because they will be generated automatically
if and only if they are omitted from the insert().select_from().<commit_after>
|
"""
Copy a table.
"""
from sqlalchemy import Table
from microcosm_postgres.types import Serial
def copy_column(column, schema):
"""
Safely create a copy of a column.
"""
return column.copy(schema=schema)
def should_copy(column):
"""
Determine if a column should be copied.
"""
if not isinstance(column.type, Serial):
return True
if column.nullable:
return True
if not column.server_default:
return True
# do not create temporary serial values; they will be defaulted on upsert/insert
return False
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
copy_column(column, schema)
for column in from_table.columns
if should_copy(column)
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
|
"""
Copy a table.
"""
from sqlalchemy import Table
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
column.copy(schema=schema)
for column in from_table.columns
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
Handle serial values on temporary table creation
Do not copy serial columns because they will be generated automatically
if and only if they are omitted from the insert().select_from()."""
Copy a table.
"""
from sqlalchemy import Table
from microcosm_postgres.types import Serial
def copy_column(column, schema):
"""
Safely create a copy of a column.
"""
return column.copy(schema=schema)
def should_copy(column):
"""
Determine if a column should be copied.
"""
if not isinstance(column.type, Serial):
return True
if column.nullable:
return True
if not column.server_default:
return True
# do not create temporary serial values; they will be defaulted on upsert/insert
return False
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
copy_column(column, schema)
for column in from_table.columns
if should_copy(column)
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
|
<commit_before>"""
Copy a table.
"""
from sqlalchemy import Table
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
column.copy(schema=schema)
for column in from_table.columns
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
<commit_msg>Handle serial values on temporary table creation
Do not copy serial columns because they will be generated automatically
if and only if they are omitted from the insert().select_from().<commit_after>"""
Copy a table.
"""
from sqlalchemy import Table
from microcosm_postgres.types import Serial
def copy_column(column, schema):
"""
Safely create a copy of a column.
"""
return column.copy(schema=schema)
def should_copy(column):
"""
Determine if a column should be copied.
"""
if not isinstance(column.type, Serial):
return True
if column.nullable:
return True
if not column.server_default:
return True
# do not create temporary serial values; they will be defaulted on upsert/insert
return False
def copy_table(from_table, name):
"""
Copy a table.
Based on `Table.tometadata`, but simplified to remove constraints and indexes.
"""
metadata = from_table.metadata
if name in metadata.tables:
return metadata.tables[name]
schema = metadata.schema
columns = [
copy_column(column, schema)
for column in from_table.columns
if should_copy(column)
]
return Table(
name,
metadata,
schema=schema,
comment=from_table.comment,
*columns,
**from_table.kwargs,
)
|
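The commit message above turns on one detail of the consuming insert: serial columns are filled by their server default only when they are absent from the insert's column list. The sketch below is hypothetical and not part of the record; it assumes SQLAlchemy 1.4+ and illustrative table objects.

from sqlalchemy import insert, select

def insert_from_temporary(connection, temp_table, target_table):
    # Name only the columns that survived should_copy(); the target's
    # serial column is omitted, so the server default generates it per row.
    names = [column.name for column in temp_table.columns]
    statement = insert(target_table).from_select(
        names,
        select(*[temp_table.c[name] for name in names]),
    )
    connection.execute(statement)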
a78f67fcf1a692633427cf0677c43f7efc760c8c
|
day-2-2.py
|
day-2-2.py
|
import re
# box_list will contain tuples in the format: (length, width, height)
box_list = []
regex = re.compile(r'^(\d+)x(\d+)x(\d+)$')
with open('day-2-input.txt', 'r') as f:
for line in f:
match = regex.match(line)
box_list.append((
int(match.group(1)),
int(match.group(2)),
int(match.group(3))
))
total_ribbon = 0
for box in box_list:
volume = box[0] * box[1] * box[2]
smallest_perimeter = sorted(box)[0] * 2 + sorted(box)[1] * 2
total_ribbon += volume + smallest_perimeter
print(total_ribbon)
# My answer: 3783758
|
Complete day 2 part 2
|
Complete day 2 part 2
|
Python
|
mit
|
foxscotch/advent-of-code,foxscotch/advent-of-code
|
Complete day 2 part 2
|
import re
# box_list will contain tuples in the format: (length, width, height)
box_list = []
regex = re.compile(r'^(\d+)x(\d+)x(\d+)$')
with open('day-2-input.txt', 'r') as f:
for line in f:
match = regex.match(line)
box_list.append((
int(match.group(1)),
int(match.group(2)),
int(match.group(3))
))
total_ribbon = 0
for box in box_list:
volume = box[0] * box[1] * box[2]
smallest_perimeter = sorted(box)[0] * 2 + sorted(box)[1] * 2
total_ribbon += volume + smallest_perimeter
print(total_ribbon)
# My answer: 3783758
|
<commit_before><commit_msg>Complete day 2 part 2<commit_after>
|
import re
# box_list will contain tuples in the format: (length, width, height)
box_list = []
regex = re.compile(r'^(\d+)x(\d+)x(\d+)$')
with open('day-2-input.txt', 'r') as f:
for line in f:
match = regex.match(line)
box_list.append((
int(match.group(1)),
int(match.group(2)),
int(match.group(3))
))
total_ribbon = 0
for box in box_list:
volume = box[0] * box[1] * box[2]
smallest_perimeter = sorted(box)[0] * 2 + sorted(box)[1] * 2
total_ribbon += volume + smallest_perimeter
print(total_ribbon)
# My answer: 3783758
|
Complete day 2 part 2import re
# box_list will contain tuples in the format: (length, width, height)
box_list = []
regex = re.compile(r'^(\d+)x(\d+)x(\d+)$')
with open('day-2-input.txt', 'r') as f:
for line in f:
match = regex.match(line)
box_list.append((
int(match.group(1)),
int(match.group(2)),
int(match.group(3))
))
total_ribbon = 0
for box in box_list:
volume = box[0] * box[1] * box[2]
smallest_perimeter = sorted(box)[0] * 2 + sorted(box)[1] * 2
total_ribbon += volume + smallest_perimeter
print(total_ribbon)
# My answer: 3783758
|
<commit_before><commit_msg>Complete day 2 part 2<commit_after>import re
# box_list will contain tuples in the format: (length, width, height)
box_list = []
regex = re.compile(r'^(\d+)x(\d+)x(\d+)$')
with open('day-2-input.txt', 'r') as f:
for line in f:
match = regex.match(line)
box_list.append((
int(match.group(1)),
int(match.group(2)),
int(match.group(3))
))
total_ribbon = 0
for box in box_list:
volume = box[0] * box[1] * box[2]
smallest_perimeter = sorted(box)[0] * 2 + sorted(box)[1] * 2
total_ribbon += volume + smallest_perimeter
print(total_ribbon)
# My answer: 3783758
|
|
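A hypothetical compact restatement of the ribbon arithmetic above, checked against the published Advent of Code 2015 day 2 examples; neither the function nor the asserts appear in the record.

def ribbon(length, width, height):
    # shortest perimeter around the box plus a bow equal to its volume
    shortest, middle, _ = sorted((length, width, height))
    return 2 * (shortest + middle) + length * width * height

assert ribbon(2, 3, 4) == 34   # 2+2+3+3 feet of ribbon, 2*3*4 foot bow
assert ribbon(1, 1, 10) == 14  # 1+1+1+1 feet of ribbon, 1*1*10 foot bow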
064c0161e91e24217d712cb80656a2d0dad8c3b6
|
pretty.py
|
pretty.py
|
from termcolor import colored
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer, AbsoluteETA
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETA()], **kwargs).start()
|
from termcolor import colored
import datetime
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer
class AbsoluteETABrief(Timer):
'''Variation of progressbar.AbsoluteETA which is smaller for 80cols.'''
def _eta(self, progress, data, value, elapsed):
"""Update the widget to show the ETA or total time when finished."""
if value == progress.min_value: # pragma: no cover
return 'ETA: --:--:--'
elif progress.end_time:
return 'Fin: %s' % self._format(progress.end_time)
else:
eta = elapsed * progress.max_value / value - elapsed
now = datetime.datetime.now()
eta_abs = now + datetime.timedelta(seconds=eta)
return 'ETA: %s' % self._format(eta_abs)
def _format(self, t):
return t.strftime("%H:%M:%S")
def __call__(self, progress, data):
'''Updates the widget to show the ETA or total time when finished.'''
return self._eta(progress, data, data['value'],
data['total_seconds_elapsed'])
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETABrief()], **kwargs).start()
|
Fix progress bar to be 80-col-friendly.
|
Fix progress bar to be 80-col-friendly.
|
Python
|
mit
|
jonhoo/periscope,jonhoo/periscope
|
from termcolor import colored
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer, AbsoluteETA
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETA()], **kwargs).start()
Fix progress bar to be 80-col-friendly.
|
from termcolor import colored
import datetime
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer
class AbsoluteETABrief(Timer):
'''Variation of progressbar.AbsoluteETA which is smaller for 80cols.'''
def _eta(self, progress, data, value, elapsed):
"""Update the widget to show the ETA or total time when finished."""
if value == progress.min_value: # pragma: no cover
return 'ETA: --:--:--'
elif progress.end_time:
return 'Fin: %s' % self._format(progress.end_time)
else:
eta = elapsed * progress.max_value / value - elapsed
now = datetime.datetime.now()
eta_abs = now + datetime.timedelta(seconds=eta)
return 'ETA: %s' % self._format(eta_abs)
def _format(self, t):
return t.strftime("%H:%M:%S")
def __call__(self, progress, data):
'''Updates the widget to show the ETA or total time when finished.'''
return self._eta(progress, data, data['value'],
data['total_seconds_elapsed'])
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETABrief()], **kwargs).start()
|
<commit_before>from termcolor import colored
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer, AbsoluteETA
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETA()], **kwargs).start()
<commit_msg>Fix progress bar to be 80-col-friendly.<commit_after>
|
from termcolor import colored
import datetime
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer
class AbsoluteETABrief(Timer):
'''Variation of progressbar.AbsoluteETA which is smaller for 80cols.'''
def _eta(self, progress, data, value, elapsed):
"""Update the widget to show the ETA or total time when finished."""
if value == progress.min_value: # pragma: no cover
return 'ETA: --:--:--'
elif progress.end_time:
return 'Fin: %s' % self._format(progress.end_time)
else:
eta = elapsed * progress.max_value / value - elapsed
now = datetime.datetime.now()
eta_abs = now + datetime.timedelta(seconds=eta)
return 'ETA: %s' % self._format(eta_abs)
def _format(self, t):
return t.strftime("%H:%M:%S")
def __call__(self, progress, data):
'''Updates the widget to show the ETA or total time when finished.'''
return self._eta(progress, data, data['value'],
data['total_seconds_elapsed'])
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETABrief()], **kwargs).start()
|
from termcolor import colored
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer, AbsoluteETA
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETA()], **kwargs).start()
Fix progress bar to be 80-col-friendly.from termcolor import colored
import datetime
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer
class AbsoluteETABrief(Timer):
'''Variation of progressbar.AbsoluteETA which is smaller for 80cols.'''
def _eta(self, progress, data, value, elapsed):
"""Update the widget to show the ETA or total time when finished."""
if value == progress.min_value: # pragma: no cover
return 'ETA: --:--:--'
elif progress.end_time:
return 'Fin: %s' % self._format(progress.end_time)
else:
eta = elapsed * progress.max_value / value - elapsed
now = datetime.datetime.now()
eta_abs = now + datetime.timedelta(seconds=eta)
return 'ETA: %s' % self._format(eta_abs)
def _format(self, t):
return t.strftime("%H:%M:%S")
def __call__(self, progress, data):
'''Updates the widget to show the ETA or total time when finished.'''
return self._eta(progress, data, data['value'],
data['total_seconds_elapsed'])
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETABrief()], **kwargs).start()
|
<commit_before>from termcolor import colored
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer, AbsoluteETA
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETA()], **kwargs).start()
<commit_msg>Fix progress bar to be 80-col-friendly.<commit_after>from termcolor import colored
import datetime
def section(msg):
print(colored("\n::", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
def task(msg):
print(colored("==>", "green", attrs=["bold"]), colored(msg, attrs=["bold"]))
def subtask(msg):
print(colored(" ->", "blue", attrs=["bold"]), colored(msg, attrs=["bold"]))
from progressbar import Bar, SimpleProgress, Percentage, ProgressBar, Timer
class AbsoluteETABrief(Timer):
'''Variation of progressbar.AbsoluteETA which is smaller for 80cols.'''
def _eta(self, progress, data, value, elapsed):
"""Update the widget to show the ETA or total time when finished."""
if value == progress.min_value: # pragma: no cover
return 'ETA: --:--:--'
elif progress.end_time:
return 'Fin: %s' % self._format(progress.end_time)
else:
eta = elapsed * progress.max_value / value - elapsed
now = datetime.datetime.now()
eta_abs = now + datetime.timedelta(seconds=eta)
return 'ETA: %s' % self._format(eta_abs)
def _format(self, t):
return t.strftime("%H:%M:%S")
def __call__(self, progress, data):
'''Updates the widget to show the ETA or total time when finished.'''
return self._eta(progress, data, data['value'],
data['total_seconds_elapsed'])
def progress(number, **kwargs):
return ProgressBar(max_value=number, widgets=[Percentage(), ' (', SimpleProgress(), ') ', Bar(), ' ', Timer(), ' ', AbsoluteETABrief()], **kwargs).start()
|
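A hypothetical usage of the progress() helper defined above, assuming the progressbar2 package is installed and the pretty module shown in the record is importable; the sleep is a stand-in for real work.

import time

from pretty import progress

bar = progress(50)
for done in range(50):
    time.sleep(0.05)  # stand-in for real work
    bar.update(done + 1)
bar.finish()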
42755823774f4a57849c54d5812e885dfbeee34c
|
camelot/roundtable/migrations/0002_add_knight_data.py
|
camelot/roundtable/migrations/0002_add_knight_data.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
pass
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
Use RunPython operation to perform data migration.
|
Use RunPython operation to perform data migration.
|
Python
|
bsd-2-clause
|
jambonrose/djangocon2014-updj17
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
]
Use RunPython operation to perform data migration.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
pass
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
]
<commit_msg>Use RunPython operation to perform data migration.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
pass
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
]
Use RunPython operation to perform data migration.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
pass
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
]
<commit_msg>Use RunPython operation to perform data migration.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_knight_data(apps, schema_editor):
pass
def remove_knight_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('roundtable', '0001_initial'),
]
operations = [
migrations.RunPython(
add_knight_data,
reverse_code=remove_knight_data),
]
|
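The RunPython callables in the record are left as pass stubs. Below is a hypothetical fleshed-out version, assuming the 0001_initial migration defines a Knight model with a name field; apps.get_model() fetches the historical model so the migration never imports current model code.

def add_knight_data(apps, schema_editor):
    Knight = apps.get_model('roundtable', 'Knight')
    Knight.objects.bulk_create([
        Knight(name='Sir Lancelot'),
        Knight(name='Sir Galahad'),
    ])

def remove_knight_data(apps, schema_editor):
    # mirror of add_knight_data so the migration stays reversible
    Knight = apps.get_model('roundtable', 'Knight')
    Knight.objects.filter(name__in=['Sir Lancelot', 'Sir Galahad']).delete()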
658a1298e8d3c1645ed4b483e75d0a1b684fd162
|
app.py
|
app.py
|
""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
""" app.py """
from flask import Flask, render_template
import pybreaker
import requests
app = Flask(__name__)
time_breaker = pybreaker.CircuitBreaker(fail_max=3, reset_timeout=30)
@time_breaker
def _get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
# please note that this is an example implementation and is
# dangerously masking the original exception. i highly recommend
# you take the appropriate means to capture/log the exception so
# that you are aware of the underlying problem that is triggering
# the circuit breaker!
raise pybreaker.CircuitBreakerError
return response.json().get('datetime')
def get_time():
try:
return _get_time()
except pybreaker.CircuitBreakerError:
return 'Unavailable'
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
Update get_time to use a circuit breaker.
|
Update get_time to use a circuit breaker.
|
Python
|
mit
|
danriti/short-circuit,danriti/short-circuit
|
""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
Update get_time to use a circuit breaker.
|
""" app.py """
from flask import Flask, render_template
import pybreaker
import requests
app = Flask(__name__)
time_breaker = pybreaker.CircuitBreaker(fail_max=3, reset_timeout=30)
@time_breaker
def _get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
# please note that this is an example implementation and is
# dangerously masking the original exception. i highly recommend
# you take the appropriate means to capture/log the exception so
# that you are aware of the underlying problem that is triggering
# the circuit breaker!
raise pybreaker.CircuitBreakerError
return response.json().get('datetime')
def get_time():
try:
return _get_time()
except pybreaker.CircuitBreakerError:
return 'Unavailable'
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
<commit_before>""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
<commit_msg>Update get_time to use a circuit breaker.<commit_after>
|
""" app.py """
from flask import Flask, render_template
import pybreaker
import requests
app = Flask(__name__)
time_breaker = pybreaker.CircuitBreaker(fail_max=3, reset_timeout=30)
@time_breaker
def _get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
# please note that this is an example implementation and is
# dangerously masking the original exception. i highly recommend
# you take the appropriate means to capture/log the exception so
# that you are aware of the underlying problem that is triggering
# the circuit breaker!
raise pybreaker.CircuitBreakerError
return response.json().get('datetime')
def get_time():
try:
return _get_time()
except pybreaker.CircuitBreakerError:
return 'Unavailable'
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
Update get_time to use a circuit breaker.""" app.py """
from flask import Flask, render_template
import pybreaker
import requests
app = Flask(__name__)
time_breaker = pybreaker.CircuitBreaker(fail_max=3, reset_timeout=30)
@time_breaker
def _get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
# please note that this is an example implementation and is
# dangerously masking the original exception. i highly recommend
# you take the appropriate means to capture/log the exception so
# that you are aware of the underlying problem that is triggering
# the circuit breaker!
raise pybreaker.CircuitBreakerError
return response.json().get('datetime')
def get_time():
try:
return _get_time()
except pybreaker.CircuitBreakerError:
return 'Unavailable'
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
<commit_before>""" app.py """
from flask import Flask, render_template
import requests
app = Flask(__name__)
def get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
return 'Unavailable'
return response.json().get('datetime')
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
<commit_msg>Update get_time to use a circuit breaker.<commit_after>""" app.py """
from flask import Flask, render_template
import pybreaker
import requests
app = Flask(__name__)
time_breaker = pybreaker.CircuitBreaker(fail_max=3, reset_timeout=30)
@time_breaker
def _get_time():
try:
response = requests.get('http://localhost:3001/time', timeout=3.0)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout):
# please note that this is an example implementation and is
# dangerously masking the original exception. i highly recommend
# you take the appropriate means to capture/log the exception so
# that you are aware of the underlying problem that is triggering
# the circuit breaker!
raise pybreaker.CircuitBreakerError
return response.json().get('datetime')
def get_time():
try:
return _get_time()
except pybreaker.CircuitBreakerError:
return 'Unavailable'
def get_user():
response = requests.get('http://localhost:3002/user')
return response.json().get('name')
@app.errorhandler(500)
def page_not_found(_):
return 'Server error', 500
@app.route("/")
def hello():
time = get_time()
name = get_user()
return render_template('hello.html', name=name, time=time)
if __name__ == "__main__":
app.run(port=3000, debug=True)
|
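A hypothetical standalone demonstration of the breaker parameters used above (fail_max=3, reset_timeout=30): pybreaker counts consecutive failures, opens the circuit on the third, and subsequent calls fail fast with CircuitBreakerError until the 30 second reset window elapses.

import pybreaker

breaker = pybreaker.CircuitBreaker(fail_max=3, reset_timeout=30)

@breaker
def flaky():
    raise RuntimeError('downstream is down')

for attempt in range(5):
    try:
        flaky()
    except pybreaker.CircuitBreakerError:
        print('attempt %d: circuit open, failed fast' % attempt)
    except RuntimeError:
        print('attempt %d: real call failed' % attempt)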
c8b9c302421f0f49f00db381954e7fc7cc657f52
|
application/__init__.py
|
application/__init__.py
|
import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
Add proxy fix as in LR this will run behind a reverse proxy
|
Add proxy fix as in LR this will run behind a reverse proxy
|
Python
|
mit
|
LandRegistry/historian-alpha,LandRegistry/historian-alpha,LandRegistry/historian-alpha
|
import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
Add proxy fix as in LR this will run behind a reverse proxy
|
import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
<commit_before>import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
<commit_msg>Add proxy fix as in LR this will run behind a reverse proxy<commit_after>
|
import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
Add proxy fix as in LR this will run behind a reverse proxyimport os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
<commit_before>import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
<commit_msg>Add proxy fix as in lr this will run with reverse proxy<commit_after>import os
import logging
from flask import Flask
from flask.ext.basicauth import BasicAuth
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
db = SQLAlchemy(app)
# auth
if os.environ.get('BASIC_AUTH_USERNAME'):
app.config['BASIC_AUTH_FORCE'] = True
basic_auth = BasicAuth(app)
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
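Note that werkzeug.contrib.fixers was removed in Werkzeug 1.0, so the import in the record only works on older Werkzeug releases. A hypothetical modern equivalent, which also makes the trusted proxy depth explicit:

from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

app = Flask(__name__)
# trust one proxy for each forwarded header the reverse proxy sets
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1)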
2250180ea7cc0eb91c8b1cdc7d565397326f480b
|
UM/Scene/SceneNodeDecorator.py
|
UM/Scene/SceneNodeDecorator.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node
def getNode(self):
return self._node
|
Add a getter for a Decorator's Scene Node
|
Add a getter for a Decorator's Scene Node
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = nodeAdd a getter for a Decorator's Scene Node
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node
def getNode(self):
return self._node
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node<commit_msg>Add a getter for a Decorator's Scene Node<commit_after>
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node
def getNode(self):
return self._node
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = nodeAdd a getter for a Decorator's Scene Node# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node
def getNode(self):
return self._node
|
<commit_before># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node<commit_msg>Add a getter for a Decorator's Scene Node<commit_after># Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SceneNodeDecorator():
def __init__(self):
super().__init__()
self._node = None
def setNode(self, node):
self._node = node
def getNode(self):
return self._node
|
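A hypothetical usage of the new getter, with a stub standing in for a real Uranium SceneNode and assuming the SceneNodeDecorator class above is importable.

class StubNode:
    pass

node = StubNode()
decorator = SceneNodeDecorator()
decorator.setNode(node)  # a SceneNode would call this when the decorator is attached
assert decorator.getNode() is node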
2301b0bfdb216f31428e6c9ca0bf6b2951a5e64b
|
symposion/forms.py
|
symposion/forms.py
|
from django import forms
import account.forms
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField()
last_name = forms.CharField()
email_confirm = forms.EmailField(label="Confirm Email")
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
del self.fields["username"]
self.fields.keyOrder = [
"email",
"email_confirm",
"first_name",
"last_name",
"password",
"password_confirm"
]
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
|
try:
from collections import OrderedDict
except ImportError:
OrderedDict = None
import account.forms
from django import forms
from django.utils.translation import ugettext_lazy as _
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField(label=_("First name"))
last_name = forms.CharField(label=_("Last name"))
email_confirm = forms.EmailField(label=_("Confirm Email"))
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
field_order = [
"first_name",
"last_name",
"email",
"email_confirm",
"password",
"password_confirm"
]
del self.fields["username"]
if not OrderedDict or hasattr(self.fields, "keyOrder"):
self.fields.keyOrder = field_order
else:
self.fields = OrderedDict((k, self.fields[k]) for k in field_order)
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
|
Fix order of fields in signup form
|
Fix order of fields in signup form
|
Python
|
bsd-3-clause
|
toulibre/symposion,toulibre/symposion
|
from django import forms
import account.forms
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField()
last_name = forms.CharField()
email_confirm = forms.EmailField(label="Confirm Email")
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
del self.fields["username"]
self.fields.keyOrder = [
"email",
"email_confirm",
"first_name",
"last_name",
"password",
"password_confirm"
]
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
Fix order of fields in signup form
|
try:
from collections import OrderedDict
except ImportError:
OrderedDict = None
import account.forms
from django import forms
from django.utils.translation import ugettext_lazy as _
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField(label=_("First name"))
last_name = forms.CharField(label=_("Last name"))
email_confirm = forms.EmailField(label=_("Confirm Email"))
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
field_order = [
"first_name",
"last_name",
"email",
"email_confirm",
"password",
"password_confirm"
]
del self.fields["username"]
if not OrderedDict or hasattr(self.fields, "keyOrder"):
self.fields.keyOrder = field_order
else:
self.fields = OrderedDict((k, self.fields[k]) for k in field_order)
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
|
<commit_before>from django import forms
import account.forms
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField()
last_name = forms.CharField()
email_confirm = forms.EmailField(label="Confirm Email")
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
del self.fields["username"]
self.fields.keyOrder = [
"email",
"email_confirm",
"first_name",
"last_name",
"password",
"password_confirm"
]
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
<commit_msg>Fix order of fields in signup form<commit_after>
|
try:
from collections import OrderedDict
except ImportError:
OrderedDict = None
import account.forms
from django import forms
from django.utils.translation import ugettext_lazy as _
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField(label=_("First name"))
last_name = forms.CharField(label=_("Last name"))
email_confirm = forms.EmailField(label=_("Confirm Email"))
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
field_order = [
"first_name",
"last_name",
"email",
"email_confirm",
"password",
"password_confirm"
]
del self.fields["username"]
if not OrderedDict or hasattr(self.fields, "keyOrder"):
self.fields.keyOrder = field_order
else:
self.fields = OrderedDict((k, self.fields[k]) for k in field_order)
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
|
from django import forms
import account.forms
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField()
last_name = forms.CharField()
email_confirm = forms.EmailField(label="Confirm Email")
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
del self.fields["username"]
self.fields.keyOrder = [
"email",
"email_confirm",
"first_name",
"last_name",
"password",
"password_confirm"
]
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
Fix order of fields in signup formtry:
from collections import OrderedDict
except ImportError:
OrderedDict = None
import account.forms
from django import forms
from django.utils.translation import ugettext_lazy as _
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField(label=_("First name"))
last_name = forms.CharField(label=_("Last name"))
email_confirm = forms.EmailField(label=_("Confirm Email"))
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
field_order = [
"first_name",
"last_name",
"email",
"email_confirm",
"password",
"password_confirm"
]
del self.fields["username"]
if not OrderedDict or hasattr(self.fields, "keyOrder"):
self.fields.keyOrder = field_order
else:
self.fields = OrderedDict((k, self.fields[k]) for k in field_order)
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
|
<commit_before>from django import forms
import account.forms
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField()
last_name = forms.CharField()
email_confirm = forms.EmailField(label="Confirm Email")
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
del self.fields["username"]
self.fields.keyOrder = [
"email",
"email_confirm",
"first_name",
"last_name",
"password",
"password_confirm"
]
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
<commit_msg>Fix order of fields in signup form<commit_after>try:
from collections import OrderedDict
except ImportError:
OrderedDict = None
import account.forms
from django import forms
from django.utils.translation import ugettext_lazy as _
class SignupForm(account.forms.SignupForm):
first_name = forms.CharField(label=_("First name"))
last_name = forms.CharField(label=_("Last name"))
email_confirm = forms.EmailField(label=_("Confirm Email"))
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
field_order = [
"first_name",
"last_name",
"email",
"email_confirm",
"password",
"password_confirm"
]
del self.fields["username"]
if not OrderedDict or hasattr(self.fields, "keyOrder"):
self.fields.keyOrder = field_order
else:
self.fields = OrderedDict((k, self.fields[k]) for k in field_order)
def clean_email_confirm(self):
email = self.cleaned_data.get("email")
email_confirm = self.cleaned_data["email_confirm"]
if email:
if email != email_confirm:
raise forms.ValidationError(
"Email address must match previously typed email address")
return email_confirm
|
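The branch in the record exists because older Django form fields exposed a keyOrder attribute while newer versions store fields in a plain OrderedDict. A hypothetical standalone demonstration of the rebuild used in the else branch:

from collections import OrderedDict

fields = OrderedDict([('email', 1), ('email_confirm', 2),
                      ('first_name', 3), ('last_name', 4)])
order = ['first_name', 'last_name', 'email', 'email_confirm']
# rebuilding the mapping in the desired key order is the portable reorder
fields = OrderedDict((key, fields[key]) for key in order)
assert list(fields) == order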
bece33fc211f436facd1d1b4c713a46ebaad372c
|
examples/miniapps/fastapi/giphynavigator/endpoints.py
|
examples/miniapps/fastapi/giphynavigator/endpoints.py
|
"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service = Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
|
"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service=Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
|
Fix fastapi example flake8 error
|
Fix fastapi example flake8 error
|
Python
|
bsd-3-clause
|
ets-labs/dependency_injector,ets-labs/python-dependency-injector,rmk135/dependency_injector,rmk135/objects
|
"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service = Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
Fix fastapi example flake8 error
|
"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service=Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
|
<commit_before>"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service = Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
<commit_msg>Fix fastapi example flake8 error<commit_after>
|
"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service=Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
|
"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service = Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
Fix fastapi example flake8 error"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service=Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
|
<commit_before>"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service = Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
<commit_msg>Fix fastapi example flake8 error<commit_after>"""Endpoints module."""
from dependency_injector.wiring import Provide
from .containers import Container
async def index(
query: str = Provide[Container.config.default.query],
limit: int = Provide[Container.config.default.limit.as_int()],
search_service=Provide[Container.search_service],
):
gifs = await search_service.search(query, limit)
return {
'query': query,
'limit': limit,
'gifs': gifs,
}
|
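The flake8 complaint fixed above is the PEP 8 rule for parameter defaults (pycodestyle E251/E252): a default combined with an annotation takes spaces around '=', an unannotated default does not. A minimal illustration:

def example(annotated: int = 1, unannotated=2):
    # 'annotated: int = 1' needs the spaces; 'unannotated=2' must not have them
    return annotated + unannotated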
18d973d71255d389369cc4450f721512a13ad6cb
|
src/impl/geocoder.py
|
src/impl/geocoder.py
|
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
|
from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
|
Add in-memory geohash cache for reverse geocoding.
|
Add in-memory geohash cache for reverse geocoding.
|
Python
|
mit
|
cbigler/jackrabbit-googlev3-geocoder
|
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
Add in-memory geohash cache for reverse geocoding.
|
from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
|
<commit_before>import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
<commit_msg>Add in-memory geohash cache for reverse geocoding.<commit_after>
|
from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
|
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
Add in-memory geohash cache for reverse geocoding.from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
|
<commit_before>import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
return loc.address
<commit_msg>Add in-memory geohash cache for reverse geocoding.<commit_after>from Geohash import geohash
import geopy
from rate_limiter import RateLimiter
class Geocoder(object):
def __init__(self, api_key=None, client_id=None, secret_key=None, reverse_cache_geohash=9):
if api_key:
self._geolocator = geopy.GoogleV3(api_key=api_key)
elif client_id and secret_key:
self._geolocator = geopy.GoogleV3(client_id=client_id, secret_key=secret_key)
else:
raise ValueError('One of either the api_key or both client_id and secret_key must be provided.')
self._geocode_limiter = RateLimiter(10)
self._reverse_limiter = RateLimiter(10)
self._reverse_cache_geohash_length = reverse_cache_geohash
self._reverse_cache = {}
def _using_cache(self):
return 0 < self._reverse_cache_geohash_length <= 12
def geocode(self, address):
self._geocode_limiter.wait()
loc = self._geolocator.geocode(address)
return [loc.latitude, loc.longitude]
def reverse(self, latitude, longitude):
addr = None
# try to get the address from the local cache, if we're using it
if self._using_cache():
ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
addr = self._reverse_cache.get(ghash)
# if we didn't get the address from the cache, or we're not using the cache
# then get it from Google
if not addr:
self._reverse_limiter.wait()
loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
addr = loc.address
# if we're using the cache, save the value we just got back
if addr and self._using_cache():
self._reverse_cache[ghash] = addr
return addr
|
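Why a geohash works as the cache key in the record above: nearby coordinates share a hash prefix, and truncating to a fixed length buckets points into grid cells (length 9 is roughly a 5 m x 5 m cell), so repeated lookups around the same spot pay for the remote call only once. Below is a minimal standalone sketch of that idea; it reuses the geohash.encode call from the commit, while the coordinates and resolver are invented for illustration.

from Geohash import geohash

cache = {}

def cached_reverse(latitude, longitude, resolver, length=9):
    # Truncating the geohash buckets nearby points into one grid cell.
    key = geohash.encode(float(latitude), float(longitude), length)
    if key not in cache:
        # Only a cache miss pays for the (rate-limited) remote call.
        cache[key] = resolver(latitude, longitude)
    return cache[key]

# Two points a couple of metres apart normally fall in the same length-9
# cell, so the second resolver is never invoked:
first = cached_reverse(51.50080, -0.12460, lambda lat, lon: "10 Downing St")
second = cached_reverse(51.50081, -0.12461, lambda lat, lon: "never called")
print(first, second)
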
08a95f7793d496d36cc0a753694c2975b2f30c68
|
accelerator/migrations/0074_update_url_to_community.py
|
accelerator/migrations/0074_update_url_to_community.py
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = "/people"
mentor_url = "/directory"
community_url = "/community"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = ["/people", "/people"]
mentor_url = "/directory"
mentor_refinement_url = "/directory/?refinementList%5Bhome_program_family%5D%5B0%5D=Israel"
community_url = "/community"
community_refinement_url = "/community/?refinementList%5Bprogram_family_names%5D%5B0%5D=Israel"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url__in=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
if mentor_refinement_url:
SiteRedirectPage.objects.filter(
mentor_refinement_url
).update(community_refinement_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
|
Add another constraint for the url
|
[AC-9046] Add another constraint for the url
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = "/people"
mentor_url = "/directory"
community_url = "/community"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
[AC-9046] Add another constraint for the url
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = ["/people", "/people"]
mentor_url = "/directory"
mentor_refinement_url = "/directory/?refinementList%5Bhome_program_family%5D%5B0%5D=Israel"
community_url = "/community"
community_refinement_url = "/community/?refinementList%5Bprogram_family_names%5D%5B0%5D=Israel"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url__in=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
if mentor_refinement_url:
SiteRedirectPage.objects.filter(
mentor_refinement_url
).update(community_refinement_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
|
<commit_before># Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = "/people"
mentor_url = "/directory"
community_url = "/community"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
<commit_msg>[AC-9046] Add another constraint for the url<commit_after>
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = ["/people", "/people"]
mentor_url = "/directory"
mentor_refinement_url = "/directory/?refinementList%5Bhome_program_family%5D%5B0%5D=Israel"
community_url = "/community"
community_refinement_url = "/community/?refinementList%5Bprogram_family_names%5D%5B0%5D=Israel"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url__in=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
if mentor_refinement_url:
SiteRedirectPage.objects.filter(
mentor_refinement_url
).update(community_refinement_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
|
# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = "/people"
mentor_url = "/directory"
community_url = "/community"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
[AC-9046] Add another constraint for the url# Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = ["/people", "/people"]
mentor_url = "/directory"
mentor_refinement_url = "/directory/?refinementList%5Bhome_program_family%5D%5B0%5D=Israel"
community_url = "/community"
community_refinement_url = "/community/?refinementList%5Bprogram_family_names%5D%5B0%5D=Israel"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url__in=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
if mentor_refinement_url:
SiteRedirectPage.objects.filter(
mentor_refinement_url
).update(community_refinement_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
|
<commit_before># Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = "/people"
mentor_url = "/directory"
community_url = "/community"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
<commit_msg>[AC-9046] Add another constraint for the url<commit_after># Generated by Django 2.2.10 on 2021-11-05 12:29
from django.db import migrations
from django.db.models.query_utils import Q
def update_url_to_community(apps, schema_editor):
people_url = ["/people", "/people"]
mentor_url = "/directory"
mentor_refinement_url = "/directory/?refinementList%5Bhome_program_family%5D%5B0%5D=Israel"
community_url = "/community"
community_refinement_url = "/community/?refinementList%5Bprogram_family_names%5D%5B0%5D=Israel"
SiteRedirectPage = apps.get_model('accelerator', 'SiteRedirectPage')
SiteRedirectPage.objects.filter(
Q(new_url__in=people_url) | Q(new_url=mentor_url)
).update(new_url=community_url)
if mentor_refinement_url:
SiteRedirectPage.objects.filter(
mentor_refinement_url
).update(community_refinement_url)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0073_auto_20210909_1706'),
]
operations = [
migrations.RunPython(update_url_to_community,
migrations.RunPython.noop)
]
|
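For readers unfamiliar with the queryset idiom in this migration: Q objects OR several conditions together and update() rewrites all matching rows in one SQL statement; both take keyword arguments (field lookups), which is worth bearing in mind when reading the refinement-URL branch above. A hedged, standalone sketch of the pattern with hypothetical model and values:

from django.db.models import Q

def redirect_to_community(SiteRedirectPage, old_urls, old_single, new_url):
    # OR two conditions with Q, then bulk-update the matches in one query.
    # The model and values are passed in; nothing here is specific to this repo.
    return SiteRedirectPage.objects.filter(
        Q(new_url__in=old_urls) | Q(new_url=old_single)
    ).update(new_url=new_url)

# Hypothetical call mirroring the migration's forward function:
# redirect_to_community(SiteRedirectPage, ["/people"], "/directory", "/community")
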
d9b00cbaf794ba4e10cd264e8d0e722f9f21fa26
|
pastas/version.py
|
pastas/version.py
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.17.1'
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.18.0b'
|
Update Dev back to v0.18.0b
|
Update Dev back to v0.18.0b
|
Python
|
mit
|
pastas/pasta,pastas/pastas
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.17.1'
Update Dev back to v0.18.0b
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.18.0b'
|
<commit_before># This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.17.1'
<commit_msg>Update Dev back to v0.18.0b<commit_after>
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.18.0b'
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.17.1'
Update Dev back to v0.18.0b# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.18.0b'
|
<commit_before># This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.17.1'
<commit_msg>Update Dev back to v0.18.0b<commit_after># This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.18.0b'
|
a460713d36e8310a9f975d13d49579e77d83dfe7
|
examples/with-shapely.py
|
examples/with-shapely.py
|
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert geom.is_valid
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
|
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert clean.is_valid
assert clean.geom_type == 'Polygon'
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
|
Fix validity assertion and add another.
|
Fix validity assertion and add another.
|
Python
|
bsd-3-clause
|
johanvdw/Fiona,Toblerity/Fiona,rbuffat/Fiona,Toblerity/Fiona,perrygeo/Fiona,perrygeo/Fiona,sgillies/Fiona,rbuffat/Fiona
|
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert geom.is_valid
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
Fix validity assertion and add another.
|
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert clean.is_valid
assert clean.geom_type == 'Polygon'
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
|
<commit_before>
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert geom.is_valid
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
<commit_msg>Fix validity assertion and add another.<commit_after>
|
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert clean.is_valid
assert clean.geom_type == 'Polygon'
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
|
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert geom.is_valid
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
Fix validity assertion and add another.
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert clean.is_valid
assert clean.geom_type == 'Polygon'
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
|
<commit_before>
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert geom.is_valid
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
<commit_msg>Fix validity assertion and add another.<commit_after>
import logging
import sys
from shapely.geometry import mapping, shape
from fiona import collection
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
with collection("docs/data/test_uk.shp", "r") as input:
schema = input.schema.copy()
with collection(
"with-shapely.shp", "w", "ESRI Shapefile", schema
) as output:
for f in input:
try:
geom = shape(f['geometry'])
if not geom.is_valid:
clean = geom.buffer(0.0)
assert clean.is_valid
assert clean.geom_type == 'Polygon'
geom = clean
f['geometry'] = mapping(geom)
output.write(f)
except Exception, e:
# Writing uncleanable features to a different shapefile
# is another option.
logging.exception("Error cleaning feature %s:", f['id'])
|
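The zero-width buffer in this example is the standard Shapely trick for repairing self-intersecting rings, and the corrected assertions matter because buffer(0.0) is only guaranteed to return valid geometry, not necessarily a single Polygon. A minimal illustration with an invented bowtie ring:

from shapely.geometry import Polygon

# A self-intersecting "bowtie" ring, the classic invalid polygon.
bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
print(bowtie.is_valid)      # False

clean = bowtie.buffer(0.0)  # the cleaning trick used in the example
print(clean.is_valid)       # True
print(clean.geom_type)      # 'Polygon' or 'MultiPolygon' depending on the
                            # input, which is what the second assert guards
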
6fa919ae5ef0755ec3c84e11aad9aa98a016fad4
|
wafw00f/plugins/ptaf.py
|
wafw00f/plugins/ptaf.py
|
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,15}?pre>')
]
if all(i for i in schemes):
return True
return False
|
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,35}?pre>')
]
if all(i for i in schemes):
return True
return False
|
Fix error in Request ID regex
|
Fix error in Request ID regex
Fix error in Request ID regex that breaks correct WAF identification.
Changed 15 to 35, because 15 characters between the date and 'pre>' are not enough for this signature.
Sample of WAF block response:
<h1>Forbidden</h1><pre>Request ID: 2017-07-31-13-59-56-72BCA33A11EC3784</pre>
|
Python
|
bsd-3-clause
|
EnableSecurity/wafw00f
|
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,15}?pre>')
]
if all(i for i in schemes):
return True
return FalseFix error in Request ID regex
Fix error in Request ID regex that breaks correct WAF identification.
Changed 15 to 35, because 15 characters between the date and 'pre>' are not enough for this signature.
Sample of WAF block response:
<h1>Forbidden</h1><pre>Request ID: 2017-07-31-13-59-56-72BCA33A11EC3784</pre>
|
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,35}?pre>')
]
if all(i for i in schemes):
return True
return False
|
<commit_before>#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,15}?pre>')
]
if all(i for i in schemes):
return True
return False<commit_msg>Fix error in Request ID regex
Fix error in Request ID regex that breaks correct WAF identification.
Changed 15 to 35, because 15 characters between the date and 'pre>' are not enough for this signature.
Sample of WAF block response:
<h1>Forbidden</h1><pre>Request ID: 2017-07-31-13-59-56-72BCA33A11EC3784</pre><commit_after>
|
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,35}?pre>')
]
if all(i for i in schemes):
return True
return False
|
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,15}?pre>')
]
if all(i for i in schemes):
return True
return FalseFix error in Request ID regex
Fix error in Request ID regex that breaks correct WAF identification.
Changed 15 to 35, because 15 characters between the date and 'pre>' are not enough for this signature.
Sample of WAF block response:
<h1>Forbidden</h1><pre>Request ID: 2017-07-31-13-59-56-72BCA33A11EC3784</pre>#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,35}?pre>')
]
if all(i for i in schemes):
return True
return False
|
<commit_before>#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,15}?pre>')
]
if all(i for i in schemes):
return True
return False<commit_msg>Fix error in Request ID regex
Fix error in Request ID regex that breaks correct WAF identification.
Changed 15 to 35, because 15 characters between the date and 'pre>' are not enough for this signature.
Sample of WAF block response:
<h1>Forbidden</h1><pre>Request ID: 2017-07-31-13-59-56-72BCA33A11EC3784</pre><commit_after>#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'PT Application Firewall (Positive Technologies)'
def is_waf(self):
schemes = [
self.matchContent(r'<h1.{0,10}?Forbidden'),
self.matchContent(r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,35}?pre>')
]
if all(i for i in schemes):
return True
return False
|
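The commit message's claim is easy to verify: once the pattern has consumed '2017-' and the first two-digit group, 31 characters remain before 'pre>' in the quoted sample, so the old {0,15} bound can never bridge the gap while {0,35} can. A standalone check against the sample response:

import re

# The blocked-response sample quoted in the commit message.
RESPONSE = ("<h1>Forbidden</h1>"
            "<pre>Request ID: 2017-07-31-13-59-56-72BCA33A11EC3784</pre>")

OLD = r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,15}?pre>'
NEW = r'<pre>Request.ID:.{0,10}?\d{4}\-(\d{2})+.{0,35}?pre>'

print(bool(re.search(OLD, RESPONSE)))  # False: the 15-char window is too short
print(bool(re.search(NEW, RESPONSE)))  # True
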
33794ea942fe7110eccddcd3ed397fefea77f7b2
|
kokki/cookbooks/aws/recipes/default.py
|
kokki/cookbooks/aws/recipes/default.py
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
Install github version of boto in aws cookbook (for now)
|
Install github version of boto in aws cookbook (for now)
|
Python
|
bsd-3-clause
|
samuel/kokki
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
Install github version of boto in aws cookbook (for now)
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
<commit_before>
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
<commit_msg>Install github version of boto in aws cookbook (for now)<commit_after>
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
Install github version of boto in aws cookbook (for now)
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
<commit_before>
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
<commit_msg>Install github version of boto in aws cookbook (for now)<commit_after>
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format if necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
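The one-line change above is about when the condition runs: the eager form calls os.path.exists() while the recipe is being parsed and hands only_if a plain bool, whereas the lambda defers the filesystem check until the resource actually executes, presumably after the pip install has had a chance to create the directory. A minimal sketch of the difference:

import os

PATH = "/usr/lib/pymodules/python2.6/boto"

eager = os.path.exists(PATH)             # evaluated right now; only_if gets a bool
deferred = lambda: os.path.exists(PATH)  # evaluated only when invoked, at run time

print(type(eager).__name__)  # 'bool'
print(callable(deferred))    # True: the framework can call it later
print(deferred())            # the actual check happens here
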
d7f078dca52afbd081760262498200990c318e95
|
allaccess/tests/__init__.py
|
allaccess/tests/__init__.py
|
from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
|
from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_commands import MigrateProvidersTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
|
Add import for test discovery prior to Django 1.6
|
Add import for test discovery prior to Django 1.6
|
Python
|
bsd-2-clause
|
mlavin/django-all-access,vyscond/django-all-access,iXioN/django-all-access,dpoirier/django-all-access,mlavin/django-all-access,vyscond/django-all-access,dpoirier/django-all-access,iXioN/django-all-access
|
from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
Add import for test discovery prior to Django 1.6
|
from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_commands import MigrateProvidersTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
|
<commit_before>from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
<commit_msg>Add import for test discovery prior to Django 1.6<commit_after>
|
from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_commands import MigrateProvidersTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
|
from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
Add import for test discovery prior to Django 1.6from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_commands import MigrateProvidersTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
|
<commit_before>from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
<commit_msg>Add import for test discovery prior to Django 1.6<commit_after>from .test_backends import AuthBackendTestCase
from .test_clients import OAuthClientTestCase, OAuth2ClientTestCase
from .test_commands import MigrateProvidersTestCase
from .test_context_processors import AvailableProvidersTestCase
from .test_models import ProviderTestCase, AccountAccessTestCase
from .test_views import OAuthRedirectTestCase, OAuthCallbackTestCase
|
0ae9fcccb1c67a8d9337e4ef2887fb7ea2e01d51
|
mpltools/io/core.py
|
mpltools/io/core.py
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savename = os.path.join(directory, filename)
if fmt is None:
fmt = plt.rcParams.get('savefig.extension','png')
if isinstance(fmt, basestring):
fmt = [fmt]
for a_fmt in fmt:
plt.savefig(savename + '.' + a_fmt)
print ('Saved \'%s\' '% (savename + '.' + a_fmt))
except(IndexError):
pass
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
fmt = fmt if fmt is not None else 'png'
if isinstance(fmt, basestring):
fmt = [fmt]
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savepath = os.path.join(directory, filename)
for a_fmt in fmt:
savename = '%s.%s' % (savepath, a_fmt)
plt.savefig(savename)
print("Saved '%s'" % savename)
except(IndexError):
pass
|
Refactor formatting of save name.
|
Refactor formatting of save name.
|
Python
|
bsd-3-clause
|
tonysyu/mpltools,matteoicardi/mpltools
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savename = os.path.join(directory, filename)
if fmt is None:
fmt = plt.rcParams.get('savefig.extension','png')
if isinstance(fmt, basestring):
fmt = [fmt]
for a_fmt in fmt:
plt.savefig(savename + '.' + a_fmt)
print ('Saved \'%s\' '% (savename + '.' + a_fmt))
except(IndexError):
pass
Refactor formatting of save name.
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
fmt = fmt if fmt is not None else 'png'
if isinstance(fmt, basestring):
fmt = [fmt]
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savepath = os.path.join(directory, filename)
for a_fmt in fmt:
savename = '%s.%s' % (savepath, a_fmt)
plt.savefig(savename)
print("Saved '%s'" % savename)
except(IndexError):
pass
|
<commit_before>import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savename = os.path.join(directory, filename)
if fmt is None:
fmt = plt.rcParams.get('savefig.extension','png')
if isinstance(fmt, basestring):
fmt = [fmt]
for a_fmt in fmt:
plt.savefig(savename + '.' + a_fmt)
print ('Saved \'%s\' '% (savename + '.' + a_fmt))
except(IndexError):
pass
<commit_msg>Refactor formatting of save name.<commit_after>
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
fmt = fmt if fmt is not None else 'png'
if isinstance(fmt, basestring):
fmt = [fmt]
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savepath = os.path.join(directory, filename)
for a_fmt in fmt:
savename = '%s.%s' % (savepath, a_fmt)
plt.savefig(savename)
print("Saved '%s'" % savename)
except(IndexError):
pass
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savename = os.path.join(directory, filename)
if fmt is None:
fmt = plt.rcParams.get('savefig.extension','png')
if isinstance(fmt, basestring):
fmt = [fmt]
for a_fmt in fmt:
plt.savefig(savename + '.' + a_fmt)
print ('Saved \'%s\' '% (savename + '.' + a_fmt))
except(IndexError):
pass
Refactor formatting of save name.import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
fmt = fmt if fmt is not None else 'png'
if isinstance(fmt, basestring):
fmt = [fmt]
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savepath = os.path.join(directory, filename)
for a_fmt in fmt:
savename = '%s.%s' % (savepath, a_fmt)
plt.savefig(savename)
print("Saved '%s'" % savename)
except(IndexError):
pass
|
<commit_before>import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savename = os.path.join(directory, filename)
if fmt is None:
fmt = plt.rcParams.get('savefig.extension','png')
if isinstance(fmt, basestring):
fmt = [fmt]
for a_fmt in fmt:
plt.savefig(savename + '.' + a_fmt)
print ('Saved \'%s\' '% (savename + '.' + a_fmt))
except(IndexError):
pass
<commit_msg>Refactor formatting of save name.<commit_after>import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
"""Save all open figures.
Each figure is saved with the title of the plot, if possible.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
Image format(s) of saved figures. If None, default to rc parameter
'savefig.extension'.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
Examples
--------
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
fmt = fmt if fmt is not None else 'png'
if isinstance(fmt, basestring):
fmt = [fmt]
for fignum in plt.get_fignums():
try:
filename = plt.figure(fignum).get_axes()[0].get_title()
if filename == '':
filename = default_name % fignum
savepath = os.path.join(directory, filename)
for a_fmt in fmt:
savename = '%s.%s' % (savepath, a_fmt)
plt.savefig(savename)
print("Saved '%s'" % savename)
except(IndexError):
pass
|
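Usage, following the docstring's example. The matplotlib backend and the import path are assumptions (save_all_figs lives in mpltools/io/core.py, and this sketch presumes it is re-exported from mpltools.io), and the target directory must already exist:

import matplotlib
matplotlib.use('Agg')  # headless backend, assumed so the sketch runs anywhere
import matplotlib.pyplot as plt

from mpltools.io import save_all_figs  # import path assumed from the file location

plt.figure()
plt.plot([0, 1], [0, 1])
plt.title('ramp')          # the title becomes the filename: ramp.pdf, ramp.png

plt.figure()
plt.plot([0, 1], [1, 0])   # no title, so this falls back to untitled2.*

save_all_figs('plots/', fmt=['pdf', 'png'])  # 'plots/' must already exist
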
2e28cf549bd7de29143c317871008b3115e44975
|
tests/vstb-example-html5/tests/rotate.py
|
tests/vstb-example-html5/tests/rotate.py
|
# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png')
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
|
# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png', timeout_secs=20)
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
|
Fix virtual-stb intermittent test-failure on Travis
|
Fix virtual-stb intermittent test-failure on Travis
test_that_virtual_stb_configures_stb_tester_for_testing_virtual_stbs fails
intermittently on Travis because sometimes chrome takes longer than 10s to
start up. This causes the test to fail with:
> MatchTimeout: Didn't find match for '.../stb-tester-350px.png' within 10
> seconds
This commit should fix that issue.
|
Python
|
lgpl-2.1
|
LewisHaley/stb-tester,LewisHaley/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester,stb-tester/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester,martynjarvis/stb-tester,stb-tester/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester
|
# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png')
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
Fix virtual-stb intermittent test-failure on Travis
test_that_virtual_stb_configures_stb_tester_for_testing_virtual_stbs fails
intermittently on Travis because sometimes chrome takes longer than 10s to
start up. This causes the test to fail with:
> MatchTimeout: Didn't find match for '.../stb-tester-350px.png' within 10
> seconds
This commit should fix that issue.
|
# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png', timeout_secs=20)
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
|
<commit_before># pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png')
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
<commit_msg>Fix virtual-stb intermittent test-failure on Travis
test_that_virtual_stb_configures_stb_tester_for_testing_virtual_stbs fails
intermittently on Travis because sometimes chrome takes longer than 10s to
start up. This causes the test to fail with:
> MatchTimeout: Didn't find match for '.../stb-tester-350px.png' within 10
> seconds
This commit should fix that issue.<commit_after>
|
# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png', timeout_secs=20)
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
|
# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png')
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
Fix virtual-stb intermittent test-failure on Travis
test_that_virtual_stb_configures_stb_tester_for_testing_virtual_stbs fails
intermittently on Travis because sometimes chrome takes longer than 10s to
start up. This causes the test to fail with:
> MatchTimeout: Didn't find match for '.../stb-tester-350px.png' within 10
> seconds
This commit should fix that issue.# pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png', timeout_secs=20)
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
|
<commit_before># pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png')
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
<commit_msg>Fix virtual-stb intermittent test-failure on Travis
test_that_virtual_stb_configures_stb_tester_for_testing_virtual_stbs fails
intermittently on Travis because sometimes chrome takes longer than 10s to
start up. This causes the test to fail with:
> MatchTimeout: Didn't find match for '.../stb-tester-350px.png' within 10
> seconds
This commit should fix that issue.<commit_after># pylint: disable=F0401
from stbt import press, wait_for_match
def wait_for_vstb_startup():
wait_for_match('stb-tester-350px.png', timeout_secs=20)
def test_that_image_is_rotated_by_arrows():
press("KEY_LEFT")
wait_for_match('stb-tester-left.png')
press("KEY_RIGHT")
wait_for_match('stb-tester-right.png')
press("KEY_UP")
wait_for_match('stb-tester-up.png')
press("KEY_DOWN")
wait_for_match('stb-tester-down.png')
def test_that_image_returns_to_normal_on_OK():
press("KEY_OK")
wait_for_match('stb-tester-350px.png')
def test_that_custom_key_is_recognised():
press("KEY_CUSTOM")
wait_for_match('stb-tester-up.png', timeout_secs=1)
|
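The fix above only raises a match timeout. A generic polling sketch (an illustration of the pattern, not stb-tester's actual wait_for_match implementation) shows why that helps flaky CI: a longer deadline absorbs a slow-starting browser without delaying the success case:
import time
def wait_until(predicate, timeout_secs=10, poll_secs=0.5):
    # Poll until `predicate` is truthy or the deadline passes. Raising
    # timeout_secs (10 -> 20 in the commit above) only widens the failure
    # window; the loop still returns immediately once the condition holds,
    # so the happy path stays fast.
    deadline = time.monotonic() + timeout_secs
    while time.monotonic() < deadline:
        if predicate():
            return
        time.sleep(poll_secs)
    raise TimeoutError('condition not met within %d seconds' % timeout_secs)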
5f9ce264d8b2d16cf951a52f05dc251358783638
|
run.py
|
run.py
|
#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run()
|
#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
Make dev server visible across internal network
|
Make dev server visible across internal network
|
Python
|
mit
|
CapitalD/taplist,CapitalD/taplist,CapitalD/taplist
|
#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run()
Make dev server visible across internal network
|
#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
<commit_before>#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run()
<commit_msg>Make dev server visible across internal network<commit_after>
|
#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run()
Make dev server visible across internal network#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
<commit_before>#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run()
<commit_msg>Make dev server visible across internal network<commit_after>#!venv/bin/python
from app import app
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
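For context, host='0.0.0.0' makes Flask's development server bind every network interface instead of loopback only, which is what exposes it on the internal network. A hedged variant that also pins the port — the port number and debug flag are illustrative assumptions, not part of the commit:
#!venv/bin/python
from app import app
if __name__ == '__main__':
    # Bind all interfaces so machines on the LAN can reach the dev server.
    app.run(host='0.0.0.0', port=5000, debug=False)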
8f1536ce63e276964648e2938a8200c1fb1dd3a7
|
api/utils/custom_serializers.py
|
api/utils/custom_serializers.py
|
import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
return int(time.mktime(value.timetuple()))
|
import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
try:
return int(time.mktime(value.timetuple()))
except OverflowError:
return 0
|
Fix exception on dates older than 1970
|
Fix exception on dates older than 1970
|
Python
|
apache-2.0
|
ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server
|
import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
return int(time.mktime(value.timetuple()))
Fix exception on dates older than 1970
|
import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
try:
return int(time.mktime(value.timetuple()))
except OverflowError:
return 0
|
<commit_before>import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
return int(time.mktime(value.timetuple()))
<commit_msg>Fix exception on dates older than 1970<commit_after>
|
import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
try:
return int(time.mktime(value.timetuple()))
except OverflowError:
return 0
|
import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
return int(time.mktime(value.timetuple()))
Fix exception on dates older than 1970import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
try:
return int(time.mktime(value.timetuple()))
except OverflowError:
return 0
|
<commit_before>import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
return int(time.mktime(value.timetuple()))
<commit_msg>Fix exception on dates older than 1970<commit_after>import time
from rest_framework import serializers
class TimeStampField(serializers.Field):
def to_internal_value(self, data):
pass
def to_representation(self, value):
try:
return int(time.mktime(value.timetuple()))
except OverflowError:
return 0
|
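time.mktime delegates to the platform's C mktime, which rejects dates outside its supported range — on many platforms anything before the 1970 epoch — hence the OverflowError. The commit clamps such dates to 0; an alternative sketch for timezone-aware UTC datetimes uses calendar.timegm, which yields negative timestamps instead (whether API clients tolerate negative values is an assumption):
import calendar
from datetime import datetime, timezone
def to_timestamp(value):
    # calendar.timegm treats the tuple as UTC and simply returns a
    # negative integer for pre-1970 datetimes instead of overflowing.
    return calendar.timegm(value.utctimetuple())
print(to_timestamp(datetime(1969, 7, 20, tzinfo=timezone.utc)))  # negative int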
c8ad376bbb44bcae317fc09cee43cfc31dc70ded
|
src/hades/config/base.py
|
src/hades/config/base.py
|
class OptionMeta(type):
"""Metaclass for options. Classes that derive from options are registered
in a global dict"""
options = {}
def __new__(mcs, name, bases, attributes):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
mcs.options[name] = class_
return class_
class Option(object, metaclass=OptionMeta):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
|
class OptionMeta(type):
"""
Metaclass for options.
Classes with this metaclass that are not declared abstract via the
abstract keyword argument are added to the :attr:`.options`
dictionary.
"""
options = {}
def __new__(mcs, name, bases, attributes, abstract=False):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
if not abstract:
mcs.options[name] = class_
return class_
# noinspection PyUnusedLocal
def __init__(cls, name, bases, attributes, abstract=False):
super().__init__(name, bases, attributes)
class Option(object, metaclass=OptionMeta, abstract=True):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
|
Add ability to define abstract options classes
|
Add ability to define abstract options classes
Only actual options should be added to the options dict of OptionMeta. This
patch adds an abstract kwarg to the OptionMeta class, which allows declaring
whether an option is abstract and should therefore not be added.
|
Python
|
mit
|
agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades
|
class OptionMeta(type):
"""Metaclass for options. Classes that derive from options are registered
in a global dict"""
options = {}
def __new__(mcs, name, bases, attributes):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
mcs.options[name] = class_
return class_
class Option(object, metaclass=OptionMeta):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
Add ability to define abstract options classes
Only actual options should be added to the options dict of OptionMeta. This
patch adds an abstract kwarg to the OptionMeta class, which allows declaring
whether an option is abstract and should therefore not be added.
|
class OptionMeta(type):
"""
Metaclass for options.
Classes with this metaclass that are not declared abstract via the
abstract keyword argument are added to the :attr:`.options`
dictionary.
"""
options = {}
def __new__(mcs, name, bases, attributes, abstract=False):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
if not abstract:
mcs.options[name] = class_
return class_
# noinspection PyUnusedLocal
def __init__(cls, name, bases, attributes, abstract=False):
super().__init__(name, bases, attributes)
class Option(object, metaclass=OptionMeta, abstract=True):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
|
<commit_before>class OptionMeta(type):
"""Metaclass for options. Classes that derive from options are registered
in a global dict"""
options = {}
def __new__(mcs, name, bases, attributes):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
mcs.options[name] = class_
return class_
class Option(object, metaclass=OptionMeta):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
<commit_msg>Add ability to define abstract options classes
Only actual options should be added to the options dict of OptionMeta. This
patch adds an abstract kwarg to the OptionMeta class, which allows declaring
whether an option is abstract and should therefore not be added.<commit_after>
|
class OptionMeta(type):
"""
Metaclass for options.
Classes with this metaclass that are not declared abstract via the
abstract keyword argument are added to the :attr:`.options`
dictionary.
"""
options = {}
def __new__(mcs, name, bases, attributes, abstract=False):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
if not abstract:
mcs.options[name] = class_
return class_
# noinspection PyUnusedLocal
def __init__(cls, name, bases, attributes, abstract=False):
super().__init__(name, bases, attributes)
class Option(object, metaclass=OptionMeta, abstract=True):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
|
class OptionMeta(type):
"""Metaclass for options. Classes that derive from options are registered
in a global dict"""
options = {}
def __new__(mcs, name, bases, attributes):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
mcs.options[name] = class_
return class_
class Option(object, metaclass=OptionMeta):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
Add ability to define abstract options classes
Only actual options should be added to the options dict of OptionMeta. This
patch adds an abstract kwarg to the OptionMeta class, which allows declaring
whether an option is abstract and should therefore not be added.class OptionMeta(type):
"""
Metaclass for options.
Classes with this metaclass that are not declared abstract via the
abstract keyword argument are added to the :attr:`.options`
dictionary.
"""
options = {}
def __new__(mcs, name, bases, attributes, abstract=False):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
if not abstract:
mcs.options[name] = class_
return class_
# noinspection PyUnusedLocal
def __init__(cls, name, bases, attributes, abstract=False):
super().__init__(name, bases, attributes)
class Option(object, metaclass=OptionMeta, abstract=True):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
|
<commit_before>class OptionMeta(type):
"""Metaclass for options. Classes that derive from options are registered
in a global dict"""
options = {}
def __new__(mcs, name, bases, attributes):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
mcs.options[name] = class_
return class_
class Option(object, metaclass=OptionMeta):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
<commit_msg>Add ability to define abstract options classes
Only actual options should be added to the options dict of OptionMeta. This
patch adds an abstract kwarg to the OptionMeta class, which allows declaring
whether an option is abstract and should therefore not be added.<commit_after>class OptionMeta(type):
"""
Metaclass for options.
Classes with this metaclass that are not declared abstract via the
abstract keyword argument are added to the :attr:`.options`
dictionary.
"""
options = {}
def __new__(mcs, name, bases, attributes, abstract=False):
if name in mcs.options:
raise TypeError("An option named {} is already defined."
.format(name))
class_ = super(OptionMeta, mcs).__new__(mcs, name, bases, attributes)
if not abstract:
mcs.options[name] = class_
return class_
# noinspection PyUnusedLocal
def __init__(cls, name, bases, attributes, abstract=False):
super().__init__(name, bases, attributes)
class Option(object, metaclass=OptionMeta, abstract=True):
default = None
type = None
runtime_check = None
static_check = None
class ConfigError(Exception):
def __init__(self, *args, option=None, **kwargs):
super(ConfigError, self).__init__(*args, **kwargs)
self.option = option
def __str__(self):
return "{}: {}".format(self.option, super(ConfigError, self).__str__())
class MissingOptionError(ConfigError):
def __init__(self, *args, **kwargs):
super(MissingOptionError, self).__init__(*args, **kwargs)
def coerce(value):
if isinstance(value, type) and issubclass(value, Option):
return value.__name__
else:
return value
|
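Python 3 forwards extra class keyword arguments to the metaclass's __new__ and __init__, which is what makes the abstract=True opt-out above work. A hypothetical usage sketch — the option names below are invented for illustration:
class TimeoutOption(Option):  # concrete: registered in OptionMeta.options
    default = 30
    type = int
class NetworkOption(Option, abstract=True):  # abstract: not registered
    pass
assert 'TimeoutOption' in OptionMeta.options
assert 'NetworkOption' not in OptionMeta.options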