commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
a31a3a9fd5f9f26bc9e06b7c682a9544f71806ad | tests/basics/dict_fromkeys.py | tests/basics/dict_fromkeys.py | d = dict.fromkeys([1, 2, 3, 4])
l = list(d.keys())
l.sort()
print(l)
d = dict.fromkeys([1, 2, 3, 4], 42)
l = list(d.values())
l.sort()
print(l)
| d = dict.fromkeys([1, 2, 3, 4])
l = list(d.keys())
l.sort()
print(l)
d = dict.fromkeys([1, 2, 3, 4], 42)
l = list(d.values())
l.sort()
print(l)
# argument to fromkeys is a generator
d = dict.fromkeys(i + 1 for i in range(1))
print(d)
| Add test for dict.fromkeys where arg is a generator. | tests/basics: Add test for dict.fromkeys where arg is a generator.
Improves coverage because it tests the case where the arg does not have a
__len__ slot.
| Python | mit | AriZuu/micropython,toolmacher/micropython,deshipu/micropython,infinnovation/micropython,swegener/micropython,adafruit/micropython,adafruit/micropython,TDAbboud/micropython,torwag/micropython,hiway/micropython,MrSurly/micropython,TDAbboud/micropython,trezor/micropython,cwyark/micropython,swegener/micropython,pozetroninc/micropython,swegener/micropython,deshipu/micropython,adafruit/circuitpython,infinnovation/micropython,selste/micropython,deshipu/micropython,infinnovation/micropython,pozetroninc/micropython,PappaPeppar/micropython,TDAbboud/micropython,kerneltask/micropython,adafruit/circuitpython,toolmacher/micropython,alex-robbins/micropython,PappaPeppar/micropython,MrSurly/micropython,AriZuu/micropython,tobbad/micropython,lowRISC/micropython,tuc-osg/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,ryannathans/micropython,MrSurly/micropython-esp32,toolmacher/micropython,blazewicz/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,lowRISC/micropython,MrSurly/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,torwag/micropython,infinnovation/micropython,infinnovation/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,Peetz0r/micropython-esp32,henriknelson/micropython,ryannathans/micropython,kerneltask/micropython,alex-robbins/micropython,micropython/micropython-esp32,mhoffma/micropython,chrisdearman/micropython,pramasoul/micropython,tobbad/micropython,ryannathans/micropython,puuu/micropython,oopy/micropython,Peetz0r/micropython-esp32,micropython/micropython-esp32,PappaPeppar/micropython,SHA2017-badge/micropython-esp32,cwyark/micropython,micropython/micropython-esp32,PappaPeppar/micropython,ryannathans/micropython,henriknelson/micropython,blazewicz/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,dmazzella/micropython,bvernoux/micropython,trezor/micropython,alex-robbins/micropython,adafruit/circuitpython,pramasoul/micropython,trezor/micropython,bvernoux/micropython,bvernoux/micropython,pfal
con/micropython,tralamazza/micropython,lowRISC/micropython,pramasoul/micropython,kerneltask/micropython,blazewicz/micropython,pfalcon/micropython,TDAbboud/micropython,chrisdearman/micropython,lowRISC/micropython,adafruit/circuitpython,torwag/micropython,selste/micropython,AriZuu/micropython,oopy/micropython,dmazzella/micropython,oopy/micropython,tralamazza/micropython,hiway/micropython,HenrikSolver/micropython,Timmenem/micropython,chrisdearman/micropython,tobbad/micropython,MrSurly/micropython-esp32,mhoffma/micropython,adafruit/micropython,puuu/micropython,pozetroninc/micropython,cwyark/micropython,blazewicz/micropython,alex-robbins/micropython,puuu/micropython,pramasoul/micropython,henriknelson/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,lowRISC/micropython,pramasoul/micropython,selste/micropython,Peetz0r/micropython-esp32,mhoffma/micropython,pozetroninc/micropython,adafruit/micropython,cwyark/micropython,PappaPeppar/micropython,toolmacher/micropython,deshipu/micropython,pfalcon/micropython,torwag/micropython,MrSurly/micropython-esp32,bvernoux/micropython,Timmenem/micropython,hiway/micropython,selste/micropython,toolmacher/micropython,SHA2017-badge/micropython-esp32,TDAbboud/micropython,Timmenem/micropython,chrisdearman/micropython,henriknelson/micropython,blazewicz/micropython,swegener/micropython,puuu/micropython,puuu/micropython,tuc-osg/micropython,deshipu/micropython,HenrikSolver/micropython,hiway/micropython,tobbad/micropython,dmazzella/micropython,MrSurly/micropython,mhoffma/micropython,tobbad/micropython,kerneltask/micropython,adafruit/circuitpython,Peetz0r/micropython-esp32,tralamazza/micropython,tuc-osg/micropython,cwyark/micropython,dmazzella/micropython,kerneltask/micropython,ryannathans/micropython,oopy/micropython,alex-robbins/micropython,micropython/micropython-esp32,HenrikSolver/micropython,HenrikSolver/micropython,selste/micropython,micropython/micropython-esp32,pfalcon/micropython,torwag/micropython,AriZu
u/micropython,HenrikSolver/micropython,trezor/micropython,tuc-osg/micropython,bvernoux/micropython,Timmenem/micropython,adafruit/circuitpython,mhoffma/micropython,Timmenem/micropython,trezor/micropython,AriZuu/micropython,oopy/micropython,tuc-osg/micropython,swegener/micropython,MrSurly/micropython | ---
+++
@@ -8,3 +8,6 @@
l.sort()
print(l)
+# argument to fromkeys is a generator
+d = dict.fromkeys(i + 1 for i in range(1))
+print(d) |
9ee301c525600cfeb8b8ca3d59f75ff9b7823008 | test/buildbot/buildbot_config/master/schedulers.py | test/buildbot/buildbot_config/master/schedulers.py | """
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
def get_schedulers():
# Run the unit tests for master
master_unit = SingleBranchScheduler(name="full",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master-unit"])
return [master_unit]
| """
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import (
Dependent,
SingleBranchScheduler)
def get_schedulers():
# Run the unit tests for master
master_unit = SingleBranchScheduler(name="master-unit",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master-unit"])
master_acceptance = Dependent(name="master-acceptance",
upstream=master_unit,
builderNames=["vagrant-master-acceptance"])
return [master_unit, master_acceptance]
| Make the acceptance tests dependent on the unit tests passing | Buildbot: Make the acceptance tests dependent on the unit tests passing
| Python | mit | zsjohny/vagrant,bheuvel/vagrant,lonniev/vagrant,dharmab/vagrant,petems/vagrant,tjanez/vagrant,senglin/vagrant,benh57/vagrant,lonniev/vagrant,tomfanning/vagrant,cgvarela/vagrant,gpkfr/vagrant,krig/vagrant,philoserf/vagrant,philwrenn/vagrant,modulexcite/vagrant,tschortsch/vagrant,bmhatfield/vagrant,mitchellh/vagrant,tbarrongh/vagrant,apertoso/vagrant,marxarelli/vagrant,chrisroberts/vagrant,doy/vagrant,otagi/vagrant,gitebra/vagrant,mpoeter/vagrant,invernizzi-at-google/vagrant,bshurts/vagrant,mkuzmin/vagrant,chrisvire/vagrant,jfchevrette/vagrant,kalabiyau/vagrant,juiceinc/vagrant,dustymabe/vagrant,gpkfr/vagrant,tjanez/vagrant,fnewberg/vagrant,mpoeter/vagrant,muhanadra/vagrant,jberends/vagrant,mkuzmin/vagrant,wkolean/vagrant,mitchellh/vagrant,bmhatfield/vagrant,stephancom/vagrant,kamazee/vagrant,ferventcoder/vagrant,myrjola/vagrant,darkn3rd/vagrant,tknerr/vagrant,sideci-sample/sideci-sample-vagrant,kamigerami/vagrant,johntron/vagrant,krig/vagrant,Endika/vagrant,gpkfr/vagrant,crashlytics/vagrant,patrys/vagrant,sferik/vagrant,Endika/vagrant,sax/vagrant,jhoblitt/vagrant,tschortsch/vagrant,legal90/vagrant,h4ck3rm1k3/vagrant,teotihuacanada/vagrant,pwnall/vagrant,samphippen/vagrant,PatrickLang/vagrant,senglin/vagrant,mwrock/vagrant,blueyed/vagrant,janek-warchol/vagrant,jmanero/vagrant,Avira/vagrant,loren-osborn/vagrant,carlosefr/vagrant,h4ck3rm1k3/vagrant,genome21/vagrant,mkuzmin/vagrant,signed8bit/vagrant,aaam/vagrant,jean/vagrant,miguel250/vagrant,webcoyote/vagrant,tknerr/vagrant,blueyed/vagrant,kalabiyau/vagrant,theist/vagrant,shtouff/vagrant,invernizzi-at-google/vagrant,dharmab/vagrant,bshurts/vagrant,mwrock/vagrant,tschortsch/vagrant,bdwyertech/vagrant,wangfakang/vagrant,juiceinc/vagrant,channui/vagrant,dustymabe/vagrant,sax/vagrant,webcoyote/vagrant,philoserf/vagrant,bryson/vagrant,tbriggs-curse/vagrant,PatOShea/vagrant,ArloL/vagrant,doy/vagrant,mwrock/vagrant,Chhed13/vagrant,zsjohny/vagrant,vamegh/vagrant,sni/vagrant,taliesins/vagrant,iNecas/vagrant,fer
ventcoder/vagrant,tbriggs-curse/vagrant,muhanadra/vagrant,taliesins/vagrant,stephancom/vagrant,kalabiyau/vagrant,Chhunlong/vagrant,tbarrongh/vagrant,loren-osborn/vagrant,apertoso/vagrant,jhoblitt/vagrant,tbarrongh/vagrant,genome21/vagrant,channui/vagrant,krig/vagrant,gbarberi/vagrant,janek-warchol/vagrant,genome21/vagrant,chrisvire/vagrant,bdwyertech/vagrant,jean/vagrant,bdwyertech/vagrant,crashlytics/vagrant,blueyed/vagrant,Ninir/vagrant,myrjola/vagrant,cgvarela/vagrant,webcoyote/vagrant,jtopper/vagrant,gajdaw/vagrant,pwnall/vagrant,ArloL/vagrant,wkolean/vagrant,jean/vagrant,stephancom/vagrant,gitebra/vagrant,p0deje/vagrant,Avira/vagrant,nickryand/vagrant,evverx/vagrant,jmanero/vagrant,janek-warchol/vagrant,tomfanning/vagrant,evverx/vagrant,jean/vagrant,benizi/vagrant,Ninir/vagrant,obnoxxx/vagrant,tbriggs-curse/vagrant,miguel250/vagrant,mitchellh/vagrant,mwarren/vagrant,h4ck3rm1k3/vagrant,benh57/vagrant,jfchevrette/vagrant,teotihuacanada/vagrant,bdwyertech/vagrant,Chhed13/vagrant,doy/vagrant,otagi/vagrant,wangfakang/vagrant,shtouff/vagrant,sideci-sample/sideci-sample-vagrant,marxarelli/vagrant,mwarren/vagrant,sferik/vagrant,sferik/vagrant,bheuvel/vagrant,marxarelli/vagrant,lukebakken/vagrant,darkn3rd/vagrant,carlosefr/vagrant,sax/vagrant,fnewberg/vagrant,signed8bit/vagrant,johntron/vagrant,glensc/vagrant,dhoer/vagrant,Sgoettschkes/vagrant,carlosefr/vagrant,kamigerami/vagrant,p0deje/vagrant,samphippen/vagrant,dustymabe/vagrant,MiLk/vagrant,ferventcoder/vagrant,Sgoettschkes/vagrant,Endika/vagrant,bmhatfield/vagrant,gitebra/vagrant,janek-warchol/vagrant,aaam/vagrant,obnoxxx/vagrant,miguel250/vagrant,nickryand/vagrant,gajdaw/vagrant,lukebakken/vagrant,tknerr/vagrant,iNecas/vagrant,dhoer/vagrant,legal90/vagrant,jberends/vagrant,mitchellh/vagrant,mephaust/vagrant,h4ck3rm1k3/vagrant,tomfanning/vagrant,chrisroberts/vagrant,Avira/vagrant,benh57/vagrant,bheuvel/vagrant,miguel250/vagrant,vamegh/vagrant,apertoso/vagrant,modulexcite/vagrant,iNecas/vagrant,jtopper/vagrant,talies
ins/vagrant,juiceinc/vagrant,wangfakang/vagrant,marxarelli/vagrant,webcoyote/vagrant,bshurts/vagrant,sni/vagrant,denisbr/vagrant,jfchevrette/vagrant,PatOShea/vagrant,jberends/vagrant,patrys/vagrant,teotihuacanada/vagrant,Chhunlong/vagrant,lonniev/vagrant,patrys/vagrant,wkolean/vagrant,fnewberg/vagrant,ianmiell/vagrant,philwrenn/vagrant,philwrenn/vagrant,jberends/vagrant,glensc/vagrant,jhoblitt/vagrant,cgvarela/vagrant,gbarberi/vagrant,philoserf/vagrant,chrisroberts/vagrant,dharmab/vagrant,loren-osborn/vagrant,tschortsch/vagrant,mwarren/vagrant,mwrock/vagrant,darkn3rd/vagrant,sni/vagrant,ianmiell/vagrant,senglin/vagrant,theist/vagrant,senglin/vagrant,evverx/vagrant,denisbr/vagrant,bheuvel/vagrant,myrjola/vagrant,fnewberg/vagrant,tbarrongh/vagrant,jmanero/vagrant,obnoxxx/vagrant,genome21/vagrant,ianmiell/vagrant,jkburges/vagrant,MiLk/vagrant,carlosefr/vagrant,mpoeter/vagrant,otagi/vagrant,aneeshusa/vagrant,stephancom/vagrant,jtopper/vagrant,msabramo/vagrant,jkburges/vagrant,chrisroberts/vagrant,cgvarela/vagrant,blueyed/vagrant,lukebakken/vagrant,legal90/vagrant,benizi/vagrant,juiceinc/vagrant,nickryand/vagrant,kamazee/vagrant,nickryand/vagrant,msabramo/vagrant,samphippen/vagrant,bryson/vagrant,invernizzi-at-google/vagrant,mkuzmin/vagrant,lukebakken/vagrant,mwarren/vagrant,TheBigBear/vagrant,Chhed13/vagrant,Chhed13/vagrant,gitebra/vagrant,benh57/vagrant,justincampbell/vagrant,ianmiell/vagrant,PatrickLang/vagrant,TheBigBear/vagrant,signed8bit/vagrant,jtopper/vagrant,shtouff/vagrant,theist/vagrant,tjanez/vagrant,Avira/vagrant,aneeshusa/vagrant,clinstid/vagrant,tjanez/vagrant,Chhunlong/vagrant,crashlytics/vagrant,MiLk/vagrant,bryson/vagrant,justincampbell/vagrant,rivy/vagrant,denisbr/vagrant,sni/vagrant,taliesins/vagrant,zsjohny/vagrant,ferventcoder/vagrant,msabramo/vagrant,muhanadra/vagrant,kalabiyau/vagrant,doy/vagrant,legal90/vagrant,BlakeMesdag/vagrant,apertoso/vagrant,PatOShea/vagrant,jfchevrette/vagrant,theist/vagrant,johntron/vagrant,chrisvire/vagrant,justincampbel
l/vagrant,vamegh/vagrant,rivy/vagrant,aaam/vagrant,aneeshusa/vagrant,kamazee/vagrant,dhoer/vagrant,mpoeter/vagrant,petems/vagrant,invernizzi-at-google/vagrant,TheBigBear/vagrant,TheBigBear/vagrant,gbarberi/vagrant,petems/vagrant,clinstid/vagrant,clinstid/vagrant,Chhunlong/vagrant,myrjola/vagrant,darkn3rd/vagrant,pwnall/vagrant,kamigerami/vagrant,jhoblitt/vagrant,sax/vagrant,wkolean/vagrant,lonniev/vagrant,otagi/vagrant,PatrickLang/vagrant,p0deje/vagrant,samphippen/vagrant,kamazee/vagrant,benizi/vagrant,philwrenn/vagrant,tbriggs-curse/vagrant,zsjohny/vagrant,modulexcite/vagrant,teotihuacanada/vagrant,petems/vagrant,tknerr/vagrant,aaam/vagrant,rivy/vagrant,signed8bit/vagrant,jkburges/vagrant,Endika/vagrant,channui/vagrant,modulexcite/vagrant,vamegh/vagrant,muhanadra/vagrant,rivy/vagrant,denisbr/vagrant,justincampbell/vagrant,msabramo/vagrant,philoserf/vagrant,loren-osborn/vagrant,sideci-sample/sideci-sample-vagrant,jkburges/vagrant,ArloL/vagrant,BlakeMesdag/vagrant,mephaust/vagrant,krig/vagrant,tomfanning/vagrant,jmanero/vagrant,PatOShea/vagrant,crashlytics/vagrant,BlakeMesdag/vagrant,gbarberi/vagrant,gajdaw/vagrant,mephaust/vagrant,aneeshusa/vagrant,shtouff/vagrant,ArloL/vagrant,bryson/vagrant,pwnall/vagrant,dustymabe/vagrant,chrisvire/vagrant,Sgoettschkes/vagrant,gpkfr/vagrant,kamigerami/vagrant,johntron/vagrant,wangfakang/vagrant,PatrickLang/vagrant,benizi/vagrant,mephaust/vagrant,Ninir/vagrant,dharmab/vagrant,dhoer/vagrant,Sgoettschkes/vagrant,bshurts/vagrant,patrys/vagrant | ---
+++
@@ -4,13 +4,19 @@
"""
from buildbot.changes.filter import ChangeFilter
-from buildbot.schedulers.basic import SingleBranchScheduler
+from buildbot.schedulers.basic import (
+ Dependent,
+ SingleBranchScheduler)
def get_schedulers():
# Run the unit tests for master
- master_unit = SingleBranchScheduler(name="full",
+ master_unit = SingleBranchScheduler(name="master-unit",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master-unit"])
- return [master_unit]
+ master_acceptance = Dependent(name="master-acceptance",
+ upstream=master_unit,
+ builderNames=["vagrant-master-acceptance"])
+
+ return [master_unit, master_acceptance] |
727939269aef168513ad6d62913e20f0af95b4e6 | dduplicated/hashs.py | dduplicated/hashs.py | import hashlib
import os
def get_hash(path):
return get_md5(path)
def get_md5(path):
hash_md5 = hashlib.md5()
if os.path.isfile(path):
with open(path, "rb") as file:
while True:
buffer = file.read(4096)
if not buffer:
break
hash_md5.update(buffer)
return hash_md5.hexdigest()
| import hashlib
import os
def get_hash(path):
return get_md5(path)
# MD5 methods is based on second answer from: https://exceptionshub.com/get-md5-hash-of-big-files-in-python.html
def get_md5(path):
hash_md5 = hashlib.md5()
if os.path.isfile(path):
with open(path, "rb") as file:
while True:
buffer = file.read(4096)
if not buffer:
break
hash_md5.update(buffer)
return hash_md5.hexdigest()
| Add reference to md5 method. | Add reference to md5 method. | Python | mit | messiasthi/dduplicated-cli | ---
+++
@@ -3,7 +3,7 @@
def get_hash(path):
return get_md5(path)
-
+# MD5 methods is based on second answer from: https://exceptionshub.com/get-md5-hash-of-big-files-in-python.html
def get_md5(path):
hash_md5 = hashlib.md5()
if os.path.isfile(path): |
9cc3cf8a2911fedce7f08d2412388154c24a9ed1 | engine.py | engine.py | # Use x, y coords for unit positions
# (97, 56) ... (104, 56)
# ... ...
# (97, 49) ... (104, 49)
#
# Algebraic notation for a position is:
# algebraic_pos = chr(x) + chr(y)
def _coord_to_algebraic(coord):
x, y = coord
return chr(x) + chr(y)
def _algebraic_to_coord(algebraic):
x, y = algebraic[0], algebraic[1]
return ord(x), ord(y)
def _is_pos_on_board(coord):
u"""Return True if coordinate is on the board."""
x, y = coord
if (97 <= x <= 104) and (49 <= y <= 56):
return True
else:
return False
| # Use x, y coords for unit positions
# (97, 56) ... (104, 56)
# ... ...
# (97, 49) ... (104, 49)
#
# Algebraic notation for a position is:
# algebraic_pos = chr(x) + chr(y)
def _coord_to_algebraic(coord):
x, y = coord
return chr(x) + chr(y)
def _algebraic_to_coord(algebraic):
x, y = algebraic[0], algebraic[1]
return ord(x), ord(y)
def _is_pos_on_board(coord):
u"""Return True if coordinate is on the board."""
x, y = coord
if (97 <= x <= 104) and (49 <= y <= 56):
return True
else:
return False
class Piece(object):
"""Parent class for chess pieces."""
def __init__(self, pos):
"""Instantiate a piece at a coordinate position."""
super(Piece, self).__init__()
if isinstance(pos, str):
self.x, self.y = _algebraic_to_coord(pos)
else:
self.x, self.y = pos
| Add Piece() to serve as the parent class for all chess pieces | Add Piece() to serve as the parent class for all chess pieces
| Python | mit | EyuelAbebe/gamer,EyuelAbebe/gamer | ---
+++
@@ -24,3 +24,14 @@
return True
else:
return False
+
+
+class Piece(object):
+ """Parent class for chess pieces."""
+ def __init__(self, pos):
+ """Instantiate a piece at a coordinate position."""
+ super(Piece, self).__init__()
+ if isinstance(pos, str):
+ self.x, self.y = _algebraic_to_coord(pos)
+ else:
+ self.x, self.y = pos |
3784b04109b2ca92633a788cc02562898064282c | factor.py | factor.py | import numpy as np
def LU(A):
m = A.shape[0]
U = A.copy()
L = np.eye( m )
for j in range(m):
for i in range(j+1,m):
L[i,j] = U[i,j]/U[j,j]
U[i,:] -= L[i,j]*U[j,:]
return L, U
| import numpy as np
def LU(A):
r"""Factor a square matrix by Gaussian elimination.
The argument A should be a square matrix (an m-by-m numpy array).
The outputs L and U are also m-by-m. L is lower-triangular with
unit diagonal entries and U is strictly upper-triangular.
This implementation does not use pivoting and can be unstable
for moderately large matrices, due to amplification of roundoff errors.
See, e.g., Lectures 20-22 of the book by Trefethen & Bau for a discussion.
Example::
>>> import factor
>>> import numpy as np
>>> A = np.array([[2.,1.,1.,0.],[4.,3.,3.,1.],[8.,7.,9.,5.],[6.,7.,9.,8.]])
>>> print A
[[ 2. 1. 1. 0.]
[ 4. 3. 3. 1.]
[ 8. 7. 9. 5.]
[ 6. 7. 9. 8.]]
>>> L, U = factor.LU(A)
>>> print L
[[ 1. 0. 0. 0.]
[ 2. 1. 0. 0.]
[ 4. 3. 1. 0.]
[ 3. 4. 1. 1.]]
>>> print U
[[ 2. 1. 1. 0.]
[ 0. 1. 1. 1.]
[ 0. 0. 2. 2.]
[ 0. 0. 0. 2.]]
"""
m = A.shape[0]
U = A.copy()
L = np.eye( m )
for j in range(m):
for i in range(j+1,m):
L[i,j] = U[i,j]/U[j,j]
U[i,:] -= L[i,j]*U[j,:]
return L, U
| Add docstring, with an example. | Add docstring, with an example.
| Python | bsd-2-clause | ketch/rock-solid-code-demo | ---
+++
@@ -1,6 +1,39 @@
import numpy as np
def LU(A):
+ r"""Factor a square matrix by Gaussian elimination.
+
+ The argument A should be a square matrix (an m-by-m numpy array).
+
+ The outputs L and U are also m-by-m. L is lower-triangular with
+ unit diagonal entries and U is strictly upper-triangular.
+
+ This implementation does not use pivoting and can be unstable
+ for moderately large matrices, due to amplification of roundoff errors.
+ See, e.g., Lectures 20-22 of the book by Trefethen & Bau for a discussion.
+
+ Example::
+
+ >>> import factor
+ >>> import numpy as np
+ >>> A = np.array([[2.,1.,1.,0.],[4.,3.,3.,1.],[8.,7.,9.,5.],[6.,7.,9.,8.]])
+ >>> print A
+ [[ 2. 1. 1. 0.]
+ [ 4. 3. 3. 1.]
+ [ 8. 7. 9. 5.]
+ [ 6. 7. 9. 8.]]
+ >>> L, U = factor.LU(A)
+ >>> print L
+ [[ 1. 0. 0. 0.]
+ [ 2. 1. 0. 0.]
+ [ 4. 3. 1. 0.]
+ [ 3. 4. 1. 1.]]
+ >>> print U
+ [[ 2. 1. 1. 0.]
+ [ 0. 1. 1. 1.]
+ [ 0. 0. 2. 2.]
+ [ 0. 0. 0. 2.]]
+ """
m = A.shape[0]
U = A.copy()
L = np.eye( m ) |
e26a49220835cd3df14820be7b400dc045092bb9 | examples/load_ui_base_instance.py | examples/load_ui_base_instance.py | import sys
import os
os.environ["QT_PREFERRED_BINDING"] = "PySide"
from Qt import QtWidgets, load_ui
def setup_ui(uifile, base_instance=None):
ui = load_ui(uifile)
if not base_instance:
return ui
else:
for member in dir(ui):
if not member.startswith('__') and \
member is not 'staticMetaObject':
setattr(base_instance, member, getattr(ui, member))
return ui
class MainWindow(QtWidgets.QWidget):
def __init__(self, parent=None):
QtWidgets.QWidget.__init__(self, parent)
setup_ui('examples/load_ui_qwidget.ui', self)
def test_load_ui_setup_ui_wrapper():
"""Example: load_ui with setup_ui wrapper
"""
app = QtWidgets.QApplication(sys.argv)
window = MainWindow()
# Tests
assert isinstance(window.__class__, type(QtWidgets.QWidget))
assert isinstance(window.parent(), type(None))
assert isinstance(window.lineEdit.__class__, type(QtWidgets.QWidget))
assert window.lineEdit.text() == ''
window.lineEdit.setText('Hello')
assert window.lineEdit.text() == 'Hello'
app.exit()
| import sys
import os
# Set preferred binding
# os.environ["QT_PREFERRED_BINDING"] = "PySide"
from Qt import QtWidgets, load_ui
def setup_ui(uifile, base_instance=None):
ui = load_ui(uifile)
if not base_instance:
return ui
else:
for member in dir(ui):
if not member.startswith('__') and \
member is not 'staticMetaObject':
setattr(base_instance, member, getattr(ui, member))
return ui
class MainWindow(QtWidgets.QWidget):
def __init__(self, parent=None):
QtWidgets.QWidget.__init__(self, parent)
setup_ui('examples/load_ui_qwidget.ui', self)
def test_load_ui_setup_ui_wrapper():
"""Example: load_ui with setup_ui wrapper
"""
app = QtWidgets.QApplication(sys.argv)
window = MainWindow()
# Tests
assert isinstance(window.__class__, QtWidgets.QWidget.__class__)
assert isinstance(window.parent(), type(None))
assert isinstance(window.lineEdit.__class__, QtWidgets.QWidget.__class__)
assert window.lineEdit.text() == ''
window.lineEdit.setText('Hello')
assert window.lineEdit.text() == 'Hello'
app.exit()
| Remove preffered binding, use obj.__class__ instead of type() | Remove preffered binding, use obj.__class__ instead of type()
| Python | mit | mottosso/Qt.py,fredrikaverpil/Qt.py,mottosso/Qt.py,fredrikaverpil/Qt.py | ---
+++
@@ -1,7 +1,9 @@
import sys
import os
-os.environ["QT_PREFERRED_BINDING"] = "PySide"
+# Set preferred binding
+# os.environ["QT_PREFERRED_BINDING"] = "PySide"
+
from Qt import QtWidgets, load_ui
@@ -30,9 +32,9 @@
window = MainWindow()
# Tests
- assert isinstance(window.__class__, type(QtWidgets.QWidget))
+ assert isinstance(window.__class__, QtWidgets.QWidget.__class__)
assert isinstance(window.parent(), type(None))
- assert isinstance(window.lineEdit.__class__, type(QtWidgets.QWidget))
+ assert isinstance(window.lineEdit.__class__, QtWidgets.QWidget.__class__)
assert window.lineEdit.text() == ''
window.lineEdit.setText('Hello')
assert window.lineEdit.text() == 'Hello' |
07517c43b3d61431e8c7c40ea5e8b545b353bee4 | imagersite/imagersite/urls.py | imagersite/imagersite/urls.py | """imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
| """imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
| Add static url to urlconf | Add static url to urlconf
| Python | mit | jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager | ---
+++
@@ -14,8 +14,16 @@
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
+from django.conf.urls.static import static
from django.contrib import admin
+from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
+
+if settings.DEBUG:
+ urlpatterns += static(
+ settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
+ )
+ |
1975b33b5f251198b59a772a38b6302fbea89017 | tests/test_create_template.py | tests/test_create_template.py | # -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pytest
import subprocess
def run_tox(plugin):
"""Run the tox suite of the newly created plugin."""
try:
subprocess.check_call([
'tox',
plugin,
'-c', os.path.join(plugin, 'tox.ini'),
'-e', 'py'
])
except subprocess.CalledProcessError as e:
pytest.fail(e)
def test_run_cookiecutter_and_plugin_tests(cookies):
"""Create a new plugin via cookiecutter and run its tests."""
result = cookies.bake()
assert result.project.basename == 'pytest-foobar'
assert result.project.isdir()
run_tox(str(result.project))
| # -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pytest
import subprocess
def run_tox(plugin):
"""Run the tox suite of the newly created plugin."""
try:
subprocess.check_call([
'tox',
plugin,
'-c', os.path.join(plugin, 'tox.ini'),
'-e', 'py'
])
except subprocess.CalledProcessError as e:
pytest.fail(e)
def test_run_cookiecutter_and_plugin_tests(cookies):
"""Create a new plugin via cookiecutter and run its tests."""
result = cookies.bake()
assert result.exit_code == 0
assert result.exception is None
assert result.project.basename == 'pytest-foobar'
assert result.project.isdir()
run_tox(str(result.project))
| Extend test to check for the exit code and for an exception | Extend test to check for the exit code and for an exception
| Python | mit | pytest-dev/cookiecutter-pytest-plugin | ---
+++
@@ -28,6 +28,8 @@
"""Create a new plugin via cookiecutter and run its tests."""
result = cookies.bake()
+ assert result.exit_code == 0
+ assert result.exception is None
assert result.project.basename == 'pytest-foobar'
assert result.project.isdir()
|
36213a31a1870cf38ec0ce3d208c6a2072e2b133 | acapi/tests/test_client.py | acapi/tests/test_client.py | import os
import requests
import requests_mock
import unittest
from .. import Client
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
"""Tests the Acquia Cloud API client class."""
req = None
"""
def setup(self, ):
" ""
Set up the tests with the mock requests handler.
" ""
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
"""
def test_find_credentials(self, m):
"""
Tests finding the credentials in environment variables
"""
os.environ['ACQUIA_CLOUD_API_USER'] = 'user'
os.environ['ACQUIA_CLOUD_API_TOKEN'] = 'token'
client = Client(cache=None)
(user, token) = client._Client__find_credentials()
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_user(self, m):
email = 'user@example.com'
m.register_uri('GET',
'https://cloudapi.acquia.com/v1/me.json',
json={"authenticated_as": email}
)
client = Client(email, 'token')
user = client.user().get()
self.assertEqual(user['authenticated_as'], email)
if __name__ == '__main__':
unittest.main()
| import os
import requests
import requests_mock
import unittest
from .. import Client
from ..exceptions import AcquiaCloudException
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
"""Tests the Acquia Cloud API client class."""
req = None
"""
def setup(self, ):
" ""
Set up the tests with the mock requests handler.
" ""
session = requests.Session()
adapter = requests_mock.Adapter()
session.mount('mock', adapter)
"""
def test_find_credentials(self, m):
"""
Tests finding the credentials in environment variables
"""
os.environ['ACQUIA_CLOUD_API_USER'] = 'user'
os.environ['ACQUIA_CLOUD_API_TOKEN'] = 'token'
client = Client(cache=None)
(user, token) = client._Client__find_credentials()
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
def test_find_credentials_none_set(self, m):
"""
Tests finding the credentials in environment variables with empty credentials
"""
os.environ['ACQUIA_CLOUD_API_USER'] = ''
os.environ['ACQUIA_CLOUD_API_TOKEN'] = ''
with self.assertRaises(AcquiaCloudException) as cm:
client = Client(cache=None)
self.assertEqual(str(cm.exception), 'Credentials not provided')
def test_user(self, m):
email = 'user@example.com'
m.register_uri('GET',
'https://cloudapi.acquia.com/v1/me.json',
json={"authenticated_as": email}
)
client = Client(email, 'token')
user = client.user().get()
self.assertEqual(user['authenticated_as'], email)
if __name__ == '__main__':
unittest.main()
| Add test for failing to find credentials | Add test for failing to find credentials
| Python | mit | skwashd/python-acquia-cloud | ---
+++
@@ -4,6 +4,7 @@
import unittest
from .. import Client
+from ..exceptions import AcquiaCloudException
@requests_mock.Mocker()
class TestClient(unittest.TestCase):
@@ -32,6 +33,17 @@
self.assertEqual(user, 'user')
self.assertEqual(token, 'token')
+ def test_find_credentials_none_set(self, m):
+ """
+ Tests finding the credentials in environment variables with empty credentials
+ """
+ os.environ['ACQUIA_CLOUD_API_USER'] = ''
+ os.environ['ACQUIA_CLOUD_API_TOKEN'] = ''
+ with self.assertRaises(AcquiaCloudException) as cm:
+ client = Client(cache=None)
+
+ self.assertEqual(str(cm.exception), 'Credentials not provided')
+
def test_user(self, m):
email = 'user@example.com'
m.register_uri('GET', |
ee24b8b57bc73947cd5140aca15389861b33ab00 | gui/qt.py | gui/qt.py | from lib.version import AMON_VERSION
from lib.keybase import KeybaseUser
from lib.gmail import GmailUser
from lib.addresses import AddressBook
import lib.gpg as gpg
import sys
import logging
import json
from PyQt4 import QtGui
class Amon(QtGui.QMainWindow):
def __init__(self):
super(Amon, self).__init__()
self.keybase_user = KeybaseUser()
self.gmail = GmailUser()
self.address_book = AddressBook()
| from lib.version import AMON_VERSION
from lib.keybase import KeybaseUser
from lib.gmail import GmailUser
from lib.addresses import AddressBook
import lib.gpg as gpg
import sys
import logging
import json
from PyQt4 import QtGui
class Amon(QtGui.QMainWindow):
def __init__(self):
super(Amon, self).__init__()
self.keybase_user = KeybaseUser()
self.gmail = GmailUser()
self.address_book = AddressBook()
self.initUI()
def initUI(self):
exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(QtGui.qApp.quit)
menubar = self.menuBar()
menubar.setNativeMenuBar(False)
fileMenu = menubar.addMenu('&File')
fileMenu.addAction(exitAction)
self.statusBar().showMessage('Ready')
self.setGeometry(300, 300, 300, 200)
self.setWindowTitle('Amon ' + AMON_VERSION)
self.show()
def main():
    """Create the application, show the main window and run the event loop."""
    app = QtGui.QApplication(sys.argv)
    # Hold a local reference to the window for the loop's duration.
    amon = Amon()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
| Update Qt gui to have status bar and menu bar | Update Qt gui to have status bar and menu bar
| Python | unlicense | CodingAnarchy/Amon | ---
+++
@@ -16,3 +16,30 @@
self.keybase_user = KeybaseUser()
self.gmail = GmailUser()
self.address_book = AddressBook()
+
+ self.initUI()
+
+ def initUI(self):
+ exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
+ exitAction.setShortcut('Ctrl+Q')
+ exitAction.setStatusTip('Exit application')
+ exitAction.triggered.connect(QtGui.qApp.quit)
+
+ menubar = self.menuBar()
+ menubar.setNativeMenuBar(False)
+ fileMenu = menubar.addMenu('&File')
+ fileMenu.addAction(exitAction)
+
+ self.statusBar().showMessage('Ready')
+ self.setGeometry(300, 300, 300, 200)
+ self.setWindowTitle('Amon ' + AMON_VERSION)
+ self.show()
+
+
+def main():
+ app = QtGui.QApplication(sys.argv)
+ amon = Amon()
+ sys.exit(app.exec_())
+
+if __name__ == '__main__':
+ main() |
6226d620078089b961fa2782d1bddb99534485ba | bin/system-info.py | bin/system-info.py | #!/usr/bin/env python
"""
System information for Tmux status line
Author: Roman Belikin roman[dot]sstu[at]gmail.com
"""
import os
import sys
import psutil
def info():
mem = psutil.virtual_memory()
return "mem >> %s/%sMB cpu >> %s%%" % (
str(int(mem.used / 1024 / 1024)),
str(int(mem.total / 1024 / 1024)),
psutil.cpu_percent(interval=0.1),
)
def main():
try:
print info()
except (KeyboardInterrupt, SystemExit):
pass
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
System information for Tmux status line
Author: Roman Belikin roman[dot]sstu[at]gmail.com
"""
import os
import sys
import psutil
def info():
    """Build the Tmux status-line string with memory and CPU usage.

    Memory figures are reported in whole megabytes; CPU usage is
    sampled over a 0.1 second interval.
    """
    vmem = psutil.virtual_memory()
    megabyte = 1024 * 1024
    used_mb = str(int(vmem.used / megabyte))
    total_mb = str(int(vmem.total / megabyte))
    cpu_pct = psutil.cpu_percent(interval=0.1)
    # Trailing space keeps a gap before the rest of the status bar.
    return "mem >> %s/%sMB cpu >> %s%% " % (used_mb, total_mb, cpu_pct)
def main():
    """Print the status line once; exit quietly on interrupt."""
    try:
        print info()
    except (KeyboardInterrupt, SystemExit):
        # Silence Ctrl+C so Tmux does not show a traceback.
        pass


if __name__ == '__main__':
    main()
| Add space at left statusbar message | Add space at left statusbar message
| Python | mit | emmit8/dotfiles,emmit8/dotfiles,emmit8/dotfiles,trippyroman/dotfiles,trippyroman/dotfiles,trippyroman/dotfiles | ---
+++
@@ -13,7 +13,7 @@
def info():
mem = psutil.virtual_memory()
- return "mem >> %s/%sMB cpu >> %s%%" % (
+ return "mem >> %s/%sMB cpu >> %s%% " % (
str(int(mem.used / 1024 / 1024)),
str(int(mem.total / 1024 / 1024)),
psutil.cpu_percent(interval=0.1), |
d5d2bff8ad68f6a3d743d9eb80b26d6d0bba4a0f | bluebottle/events/tasks.py | bluebottle/events/tasks.py | from celery.schedules import crontab
from celery.task import periodic_task
from django.utils.timezone import now
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
import logging
from bluebottle.events.models import Event
logger = logging.getLogger('bluebottle')
@periodic_task(
run_every=(crontab(minute='*/1')),
name="check_event_start",
ignore_result=True
)
def check_event_start():
for tenant in Client.objects.all():
with LocalTenant(tenant, clear_tenant=True):
# Start events that are running now
events = Event.objects.filter(
start_time__lte=now(),
end_time__gte=now(),
status__in=['full', 'open']
).all()
for event in events:
event.transitions.start()
event.save()
@periodic_task(
run_every=(crontab(minute='*/1')),
name="check_event_end",
ignore_result=True
)
def check_event_end():
for tenant in Client.objects.all():
with LocalTenant(tenant, clear_tenant=True):
# Close events that are over
events = Event.objects.filter(
end_time__lte=now(),
status__in=['running']
).all()
for event in events:
event.transitions.succeed()
event.save()
| from celery.schedules import crontab
from celery.task import periodic_task
from django.utils.timezone import now
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
import logging
from bluebottle.events.models import Event
logger = logging.getLogger('bluebottle')
@periodic_task(
    run_every=(crontab(minute='*/15')),
    name="check_event_start",
    ignore_result=True
)
def check_event_start():
    """Periodic task (every 15 minutes): start events that are due.

    Iterates every tenant and, inside that tenant's context, moves
    'open'/'full' events whose start time has passed (and whose end
    time has not) into their started state.
    """
    for tenant in Client.objects.all():
        with LocalTenant(tenant, clear_tenant=True):
            # Start events that are running now
            events = Event.objects.filter(
                start_time__lte=now(),
                end_time__gte=now(),
                status__in=['full', 'open']
            ).all()
            for event in events:
                event.transitions.start()
                event.save()
@periodic_task(
    run_every=(crontab(minute='*/15')),
    name="check_event_end",
    ignore_result=True
)
def check_event_end():
    """Periodic task (every 15 minutes): close events that are over.

    Iterates every tenant and, inside that tenant's context, marks
    'running' events whose end time has passed as succeeded.
    """
    for tenant in Client.objects.all():
        with LocalTenant(tenant, clear_tenant=True):
            # Close events that are over
            events = Event.objects.filter(
                end_time__lte=now(),
                status__in=['running']
            ).all()
            for event in events:
                event.transitions.succeed()
                event.save()
| Check once every 15 minutes | Check once every 15 minutes
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | ---
+++
@@ -12,7 +12,7 @@
@periodic_task(
- run_every=(crontab(minute='*/1')),
+ run_every=(crontab(minute='*/15')),
name="check_event_start",
ignore_result=True
)
@@ -32,7 +32,7 @@
@periodic_task(
- run_every=(crontab(minute='*/1')),
+ run_every=(crontab(minute='*/15')),
name="check_event_end",
ignore_result=True
) |
fb1a5f892e684143cd254447b5e1607a4b9e2c03 | blaze/io/_printing/array_repr.py | blaze/io/_printing/array_repr.py | from __future__ import absolute_import, division, print_function
from . import _arrayprint
from ...datadescriptor import RemoteDataDescriptor
def array_repr(a):
# TODO: create a mechanism for data descriptor to override
# printing.
if isinstance(a._data, RemoteDataDescriptor):
body = 'RemoteDataDescriptor(%r)' % a._data.url
else:
body = _arrayprint.array2string(a._data, separator=', ')
pre = 'array('
post = ',\n' + ' '*len(pre) + "dshape='" + str(a.dshape) + "'" + ')'
# For a multi-line, start it on the next line so things align properly
if '\n' in body:
pre += '\n'
return pre + body + post
| from __future__ import absolute_import, division, print_function
from . import _arrayprint
from ...datadescriptor import RemoteDataDescriptor
def array_repr(a):
    """Return the ``repr`` string for a blaze array.

    Remote data is rendered as ``RemoteDataDescriptor(<url>)`` instead
    of being printed element-wise; local data goes through the array
    printer, aligned under the ``array(`` prefix.
    """
    # TODO: create a mechanism for data descriptor to override printing.
    prefix = 'array('
    pad = ' ' * len(prefix)
    suffix = ",\n" + pad + "dshape='" + str(a.dshape) + "'" + ')'

    data = a._data
    if isinstance(data, RemoteDataDescriptor):
        body = 'RemoteDataDescriptor(%r)' % data.url
    else:
        body = _arrayprint.array2string(data,
                                        separator=', ',
                                        prefix=pad)
    return prefix + body + suffix
| Revert repr change, it looks worse for small example arrays | Revert repr change, it looks worse for small example arrays
| Python | bsd-3-clause | xlhtc007/blaze,cowlicks/blaze,alexmojaki/blaze,FrancescAlted/blaze,maxalbert/blaze,mrocklin/blaze,dwillmer/blaze,ContinuumIO/blaze,nkhuyu/blaze,jdmcbr/blaze,jcrist/blaze,FrancescAlted/blaze,aterrel/blaze,AbhiAgarwal/blaze,AbhiAgarwal/blaze,alexmojaki/blaze,mwiebe/blaze,dwillmer/blaze,AbhiAgarwal/blaze,maxalbert/blaze,jcrist/blaze,ChinaQuants/blaze,cpcloud/blaze,caseyclements/blaze,mrocklin/blaze,markflorisson/blaze-core,ContinuumIO/blaze,cpcloud/blaze,markflorisson/blaze-core,cowlicks/blaze,FrancescAlted/blaze,scls19fr/blaze,mwiebe/blaze,AbhiAgarwal/blaze,markflorisson/blaze-core,aterrel/blaze,nkhuyu/blaze,markflorisson/blaze-core,LiaoPan/blaze,mwiebe/blaze,jdmcbr/blaze,mwiebe/blaze,caseyclements/blaze,aterrel/blaze,LiaoPan/blaze,ChinaQuants/blaze,scls19fr/blaze,FrancescAlted/blaze,xlhtc007/blaze | ---
+++
@@ -5,18 +5,16 @@
def array_repr(a):
+ pre = 'array('
+ post = ',\n' + ' '*len(pre) + "dshape='" + str(a.dshape) + "'" + ')'
+
# TODO: create a mechanism for data descriptor to override
# printing.
if isinstance(a._data, RemoteDataDescriptor):
body = 'RemoteDataDescriptor(%r)' % a._data.url
else:
- body = _arrayprint.array2string(a._data, separator=', ')
-
- pre = 'array('
- post = ',\n' + ' '*len(pre) + "dshape='" + str(a.dshape) + "'" + ')'
-
- # For a multi-line, start it on the next line so things align properly
- if '\n' in body:
- pre += '\n'
+ body = _arrayprint.array2string(a._data,
+ separator=', ',
+ prefix=' '*len(pre))
return pre + body + post |
7dced29bcf8b2b5f5220f5dbfeaf631d9d5fc409 | examples/backtest.py | examples/backtest.py | import time
import logging
from pythonjsonlogger import jsonlogger
from flumine import FlumineBacktest, clients
from strategies.lowestlayer import LowestLayer
logger = logging.getLogger()
custom_format = "%(asctime) %(levelname) %(message)"
log_handler = logging.StreamHandler()
formatter = jsonlogger.JsonFormatter(custom_format)
formatter.converter = time.gmtime
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
logger.setLevel(logging.INFO)
client = clients.BacktestClient()
framework = FlumineBacktest(client=client)
markets = ["tests/resources/PRO-1.170258213"]
strategy = LowestLayer(
market_filter={"markets": markets},
max_order_exposure=1000,
max_selection_exposure=105,
context={"stake": 2},
)
framework.add_strategy(strategy)
framework.run()
for market in framework.markets:
print("Profit: {0:.2f}".format(sum([o.simulated.profit for o in market.blotter])))
for order in market.blotter:
print(
order.selection_id,
order.responses.date_time_placed,
order.status,
order.order_type.price,
order.average_price_matched,
order.size_matched,
order.simulated.profit,
)
| import time
import logging
from pythonjsonlogger import jsonlogger
from flumine import FlumineBacktest, clients
from strategies.lowestlayer import LowestLayer
# JSON logging on the root logger, with UTC timestamps.
logger = logging.getLogger()
custom_format = "%(asctime) %(levelname) %(message)"
log_handler = logging.StreamHandler()
formatter = jsonlogger.JsonFormatter(custom_format)
formatter.converter = time.gmtime  # log times in UTC
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
logger.setLevel(logging.INFO)  # Set to logging.CRITICAL to speed up backtest

# Backtest framework replaying a recorded market file.
client = clients.BacktestClient()
framework = FlumineBacktest(client=client)

markets = ["tests/resources/PRO-1.170258213"]

strategy = LowestLayer(
    market_filter={"markets": markets},
    max_order_exposure=1000,
    max_selection_exposure=105,
    context={"stake": 2},
)
framework.add_strategy(strategy)

framework.run()

# Report the simulated outcome of every order in each market.
for market in framework.markets:
    print("Profit: {0:.2f}".format(sum([o.simulated.profit for o in market.blotter])))
    for order in market.blotter:
        print(
            order.selection_id,
            order.responses.date_time_placed,
            order.status,
            order.order_type.price,
            order.average_price_matched,
            order.size_matched,
            order.simulated.profit,
        )
| Comment to say using logging.CRITICAL is faster | Comment to say using logging.CRITICAL is faster
| Python | mit | liampauling/flumine | ---
+++
@@ -13,7 +13,7 @@
formatter.converter = time.gmtime
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
-logger.setLevel(logging.INFO)
+logger.setLevel(logging.INFO) # Set to logging.CRITICAL to speed up backtest
client = clients.BacktestClient()
|
245628bf53bf7255ccd5aa15d21ff8c1f5751ef8 | examples/listdevs.py | examples/listdevs.py | #!/usr/bin/env python
import usb1
def main():
context = usb1.USBContext()
for device in context.getDeviceList(skip_on_error=True):
print str(device)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import usb1
def main():
    """List attached USB devices: ID, bus/port topology and device address."""
    context = usb1.USBContext()
    # skip_on_error keeps enumeration going past inaccessible devices.
    for device in context.getDeviceList(skip_on_error=True):
        print 'ID %04x:%04x' % (device.getVendorID(), device.getProductID()), '->'.join(str(x) for x in ['Bus %03i' % (device.getBusNumber(), )] + device.getPortNumberList()), 'Device', device.getDeviceAddress()


if __name__ == '__main__':
    main()
| Modify listdev to exercise getPortNumberList . | examples: Modify listdev to exercise getPortNumberList .
| Python | lgpl-2.1 | vpelletier/python-libusb1,vpelletier/python-libusb1 | ---
+++
@@ -4,7 +4,7 @@
def main():
context = usb1.USBContext()
for device in context.getDeviceList(skip_on_error=True):
- print str(device)
+ print 'ID %04x:%04x' % (device.getVendorID(), device.getProductID()), '->'.join(str(x) for x in ['Bus %03i' % (device.getBusNumber(), )] + device.getPortNumberList()), 'Device', device.getDeviceAddress()
if __name__ == '__main__':
main() |
0668a4bba21e44a028cb008b03165f63eba5b457 | acute/models.py | acute/models.py | """
acute models.
"""
from django.db.models import fields
from opal import models
class Demographics(models.Demographics): pass
class Location(models.Location): pass
class Allergies(models.Allergies): pass
class Diagnosis(models.Diagnosis): pass
class PastMedicalHistory(models.PastMedicalHistory): pass
class Treatment(models.Treatment): pass
class Investigation(models.Investigation): pass
class Clerking(models.EpisodeSubrecord):
_icon = 'fa fa-user'
referrer = fields.CharField(max_length=200, blank=True, null=True)
clerked_by = fields.CharField(max_length=200, blank=True, null=True)
consultant = fields.CharField(max_length=200, blank=True, null=True)
class Plan(models.EpisodeSubrecord):
_is_singleton = True
_icon = 'fa fa-list-ol'
plan = fields.TextField(blank=True, null=True)
class Rescuscitation(models.EpisodeSubrecord):
_icon = 'fa fa-warning'
status = fields.CharField(max_length=200, blank=True, null=True)
class NursingNotes(models.EpisodeSubrecord):
_icon = 'fa fa-info-circle'
notes = fields.TextField(blank=True, null=True)
class DischargeDue(models.EpisodeSubrecord):
_icon = 'fa fa-calendar'
date = fields.DateField(blank=True, null=True)
| """
acute models.
"""
from django.db.models import fields
from opal import models
# App-local subclasses of the stock opal subrecord models; no behaviour
# is added here.
class Demographics(models.Demographics): pass
class Location(models.Location): pass
class Allergies(models.Allergies): pass
class Diagnosis(models.Diagnosis): pass
class PastMedicalHistory(models.PastMedicalHistory): pass
class Treatment(models.Treatment): pass
class Investigation(models.Investigation): pass
class Clerking(models.EpisodeSubrecord):
    _icon = 'fa fa-user'
    # Displayed as "Seen by" in the UI.
    _title = 'Seen by'

    referrer = fields.CharField(max_length=200, blank=True, null=True)
    clerked_by = fields.CharField(max_length=200, blank=True, null=True)
    consultant = fields.CharField(max_length=200, blank=True, null=True)


class Plan(models.EpisodeSubrecord):
    # Singleton subrecord — presumably one plan per episode (opal semantics).
    _is_singleton = True
    _icon = 'fa fa-list-ol'

    plan = fields.TextField(blank=True, null=True)


class Rescuscitation(models.EpisodeSubrecord):
    # NOTE(review): class name is a typo for "Resuscitation"; kept as-is
    # because renaming would change the model/API name callers rely on.
    _icon = 'fa fa-warning'

    status = fields.CharField(max_length=200, blank=True, null=True)


class NursingNotes(models.EpisodeSubrecord):
    _icon = 'fa fa-info-circle'

    notes = fields.TextField(blank=True, null=True)


class DischargeDue(models.EpisodeSubrecord):
    _icon = 'fa fa-calendar'

    date = fields.DateField(blank=True, null=True)
| Rename Clerking -> Seen by | Rename Clerking -> Seen by
closes #1
| Python | agpl-3.0 | openhealthcare/acute,openhealthcare/acute,openhealthcare/acute | ---
+++
@@ -15,6 +15,7 @@
class Clerking(models.EpisodeSubrecord):
_icon = 'fa fa-user'
+ _title = 'Seen by'
referrer = fields.CharField(max_length=200, blank=True, null=True)
clerked_by = fields.CharField(max_length=200, blank=True, null=True) |
eb606f58b695dbb215b46cec3c895045e811bbad | scanpointgenerator/maskedgenerator.py | scanpointgenerator/maskedgenerator.py |
class Factory(object):
def __init__(self, generator, roi):
self.generator = generator
self.roi = roi
def iterator(self):
for point in self.generator.iterator():
if self.roi.contains_point(point):
yield point
|
class MaskedGenerator(object):
    """Wrap a point generator so only points inside a region of interest
    are produced.

    Iteration is delegated to ``generator.iterator()``; every candidate
    point is filtered through ``roi.contains_point``.
    """

    def __init__(self, generator, roi):
        self.generator = generator
        self.roi = roi

    def iterator(self):
        """Yield the generated points that fall inside the ROI."""
        for candidate in self.generator.iterator():
            if self.roi.contains_point(candidate):
                yield candidate
| Rename Factory class to MaskedGenerator | Rename Factory class to MaskedGenerator
| Python | apache-2.0 | dls-controls/scanpointgenerator | ---
+++
@@ -1,6 +1,6 @@
-class Factory(object):
+class MaskedGenerator(object):
def __init__(self, generator, roi):
|
861d4f9773193a03d1f53c6e0c3f78d48b096d45 | juliet_importer.py | juliet_importer.py | import os
class loader:
modules = {};
def __init__(self):
self.load_modules();
def load_modules(self, path="./modules/"): # Consider adding recursive sorting at some point in the future
names = os.listdir(path);
pwd = os.getcwd();
os.chdir(path);
for name in names:
print("Importing module {0}".format(name));
name = name.split('.')[0];
try:
new_module = __import__(name);
self.modules[name] = new_module;
except ImportError:
print("Error importing module {0}".format(name));
continue;
print("Success");
os.chdir(pwd);
| import os
import importlib.util

# Registry of successfully imported plugin modules, keyed by module name.
modules = {}


def load_modules(path="./modules/"):  # Consider adding recursive sorting at some point in the future
    """Import every ``*.py`` file in *path*, recording it in ``modules``.

    Modules are keyed by their file name without the trailing ``.py``.
    Files that fail to import are reported and skipped.

    :param path: directory scanned for plugin modules.
    """
    for filename in os.listdir(path):
        if not filename.endswith(".py"):
            continue
        print("Importing module {0}".format(filename))
        # Strip only the trailing ".py" so dotted stems survive intact
        # (the old split('.')[0] truncated names like "a.b.py" to "a").
        name = filename[:-3]
        try:
            # BUG FIX: the loader needs the module *file* path, not the
            # containing directory (the old code passed ``path`` itself).
            # importlib.util replaces the deprecated/removed ``imp`` module.
            spec = importlib.util.spec_from_file_location(
                name, os.path.join(path, filename))
            new_module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(new_module)
            modules[name] = new_module
        except ImportError as e:
            print("Error importing module {0} from directory {1}".format(name, os.getcwd()))
            print(e)
            continue
        print("Success")
load_modules()
| Remove superfluous class from importer | Remove superfluous class from importer
| Python | bsd-2-clause | halfbro/juliet | ---
+++
@@ -1,23 +1,21 @@
import os
+import imp
-class loader:
- modules = {};
+modules = {}
- def __init__(self):
- self.load_modules();
+def load_modules(path="./modules/"): # Consider adding recursive sorting at some point in the future
+ names = os.listdir(path)
+ for name in names:
+ if not name.endswith(".py"): continue
+ print("Importing module {0}".format(name))
+ name = name.split('.')[0]
+ try:
+ new_module = imp.load_source(name, path)
+ modules[name] = new_module
+ except ImportError as e:
+ print("Error importing module {0} from directory {1}".format(name,os.getcwd()))
+ print(e)
+ continue
+ print("Success")
- def load_modules(self, path="./modules/"): # Consider adding recursive sorting at some point in the future
- names = os.listdir(path);
- pwd = os.getcwd();
- os.chdir(path);
- for name in names:
- print("Importing module {0}".format(name));
- name = name.split('.')[0];
- try:
- new_module = __import__(name);
- self.modules[name] = new_module;
- except ImportError:
- print("Error importing module {0}".format(name));
- continue;
- print("Success");
- os.chdir(pwd);
+load_modules() |
99592279585c27ad2c41f50d49c1e3264173eae6 | actions/actions.py | actions/actions.py | #!/usr/bin/python
import sys
import os
sys.path.append('hooks/')
import subprocess
from charmhelpers.core.hookenv import action_fail
from utils import (
pause_unit,
resume_unit,
)
def pause(args):
"""Pause the Ceilometer services.
@raises Exception should the service fail to stop.
"""
pause_unit()
def resume(args):
"""Resume the Ceilometer services.
@raises Exception should the service fail to start."""
resume_unit()
ACTIONS = {"pause": pause, "resume": resume}
def main(args):
action_name = os.path.basename(args[0])
try:
action = ACTIONS[action_name]
except KeyError:
return "Action %s undefined" % action_name
else:
try:
action(args)
except Exception as e:
action_fail(str(e))
if __name__ == "__main__":
sys.exit(main(sys.argv))
| #!/usr/bin/python
import sys
import os
sys.path.append('hooks/')
import subprocess
from charmhelpers.core.hookenv import action_fail
from utils import (
pause_unit,
resume_unit,
)
def pause(args):
    """Pause the hacluster services.

    :param args: action command-line arguments (unused).
    @raises Exception should the service fail to stop.
    """
    pause_unit()
def resume(args):
    """Resume the hacluster services.

    :param args: action command-line arguments (unused).
    @raises Exception should the service fail to start."""
    resume_unit()
# Dispatch table: action name -> handler.
ACTIONS = {"pause": pause, "resume": resume}


def main(args):
    """Run the action whose name matches the invoked script's basename.

    Returns an error string for an unknown action name; exceptions
    raised by the action itself are reported through ``action_fail``.
    """
    action_name = os.path.basename(args[0])
    if action_name not in ACTIONS:
        return "Action %s undefined" % action_name
    try:
        ACTIONS[action_name](args)
    except Exception as e:
        action_fail(str(e))
if __name__ == "__main__":
sys.exit(main(sys.argv))
| Fix copy and pasta error | Fix copy and pasta error | Python | apache-2.0 | CanonicalBootStack/charm-hacluster,CanonicalBootStack/charm-hacluster,CanonicalBootStack/charm-hacluster | ---
+++
@@ -11,13 +11,13 @@
)
def pause(args):
- """Pause the Ceilometer services.
+ """Pause the hacluster services.
@raises Exception should the service fail to stop.
"""
pause_unit()
def resume(args):
- """Resume the Ceilometer services.
+ """Resume the hacluster services.
@raises Exception should the service fail to start."""
resume_unit()
|
16945303e5092bbd37f914ea10936d95e054f703 | harvesting_blog_data.py | harvesting_blog_data.py | import os
import sys
import json
import feedparser
from bs4 import BeautifulSoup
FEED_URL = 'http://g1.globo.com/dynamo/rss2.xml'
def cleanHtml(html):
return BeautifulSoup(html, 'lxml').get_text()
fp = feedparser.parse(FEED_URL)
print "Fetched %s entries from '%s'" % (len(fp.entries[0].title), fp.feed.title)
blog_posts = []
for e in fp.entries:
blog_posts.append({'title': e.title,
'published': e.published,
'summary': cleanHtml(e.summary),
'link': e.link})
out_file = os.path.join('./', 'feed.json')
f = open(out_file, 'w')
f.write(json.dumps(blog_posts, indent=1))
f.close()
print 'Wrote output file to %s' % (f.name, )
| # -*- coding: UTF-8 -*-
import os
import sys
import json
import feedparser
from bs4 import BeautifulSoup
FEED_URL = 'http://g1.globo.com/dynamo/rss2.xml'
fp = feedparser.parse(FEED_URL)
print "Fetched %s entries from '%s'" % (len(fp.entries[0].title), fp.feed.title)
blog_posts = []
for e in fp.entries:
blog_posts.append({'title': e.title,
'published': e.published,
'summary': BeautifulSoup(e.summary, 'lxml').get_text(),
'link': e.link})
out_file = os.path.join('./', 'feed.json')
f = open(out_file, 'w')
f.write(json.dumps(blog_posts, indent=1, ensure_ascii=False).encode('utf8'))
f.close()
print 'Wrote output file to %s' % (f.name, )
| Add G1 example and utf-8 | Add G1 example and utf-8
| Python | apache-2.0 | fabriciojoc/redes-sociais-web,fabriciojoc/redes-sociais-web | ---
+++
@@ -1,3 +1,5 @@
+# -*- coding: UTF-8 -*-
+
import os
import sys
import json
@@ -5,9 +7,6 @@
from bs4 import BeautifulSoup
FEED_URL = 'http://g1.globo.com/dynamo/rss2.xml'
-
-def cleanHtml(html):
- return BeautifulSoup(html, 'lxml').get_text()
fp = feedparser.parse(FEED_URL)
@@ -17,13 +16,13 @@
for e in fp.entries:
blog_posts.append({'title': e.title,
'published': e.published,
- 'summary': cleanHtml(e.summary),
+ 'summary': BeautifulSoup(e.summary, 'lxml').get_text(),
'link': e.link})
out_file = os.path.join('./', 'feed.json')
f = open(out_file, 'w')
-f.write(json.dumps(blog_posts, indent=1))
+f.write(json.dumps(blog_posts, indent=1, ensure_ascii=False).encode('utf8'))
f.close()
print 'Wrote output file to %s' % (f.name, ) |
bb045a6be1deacb1ee1c0e8746079ae77ec906f0 | admin/base/wsgi.py | admin/base/wsgi.py | """
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
from website import settings
if not settings.DEBUG_MODE:
from gevent import monkey
monkey.patch_all()
# PATCH: avoid deadlock on getaddrinfo, this patch is necessary while waiting for
# the final gevent 1.1 release (https://github.com/gevent/gevent/issues/349)
unicode('foo').encode('idna') # noqa
import os # noqa
from django.core.wsgi import get_wsgi_application # noqa
from website.app import init_app # noqa
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'admin.base.settings')
init_app(set_backends=True, routes=False, attach_request_handlers=False)
application = get_wsgi_application()
| """
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
from website import settings

if not settings.DEBUG_MODE:
    # Cooperative I/O for production: patch the stdlib before anything
    # else is imported — presumably so later imports pick up the
    # gevent-patched versions.
    from gevent import monkey
    monkey.patch_all()
    # PATCH: avoid deadlock on getaddrinfo, this patch is necessary while waiting for
    # the final gevent 1.1 release (https://github.com/gevent/gevent/issues/349)
    # unicode('foo').encode('idna') # noqa
    # Make psycopg2 cooperate with gevent's event loop.
    from psycogreen.gevent import patch_psycopg # noqa
    patch_psycopg()

# These imports deliberately come after the gevent patching above.
import os # noqa
from django.core.wsgi import get_wsgi_application # noqa
from website.app import init_app # noqa

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'admin.base.settings')
init_app(set_backends=True, routes=False, attach_request_handlers=False)

# Module-level WSGI callable picked up by the application server.
application = get_wsgi_application()
| Patch psycopg correctly in admin | Patch psycopg correctly in admin
[skip ci]
| Python | apache-2.0 | caseyrollins/osf.io,mattclark/osf.io,Nesiehr/osf.io,acshi/osf.io,laurenrevere/osf.io,pattisdr/osf.io,cwisecarver/osf.io,acshi/osf.io,felliott/osf.io,felliott/osf.io,adlius/osf.io,pattisdr/osf.io,Nesiehr/osf.io,erinspace/osf.io,cslzchen/osf.io,chennan47/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,felliott/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,leb2dg/osf.io,mfraezz/osf.io,aaxelb/osf.io,leb2dg/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,caseyrollins/osf.io,hmoco/osf.io,cwisecarver/osf.io,acshi/osf.io,TomBaxter/osf.io,adlius/osf.io,mattclark/osf.io,mfraezz/osf.io,cslzchen/osf.io,chrisseto/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,sloria/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,crcresearch/osf.io,hmoco/osf.io,mluo613/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,acshi/osf.io,erinspace/osf.io,monikagrabowska/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,baylee-d/osf.io,chrisseto/osf.io,baylee-d/osf.io,icereval/osf.io,binoculars/osf.io,baylee-d/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,chrisseto/osf.io,binoculars/osf.io,icereval/osf.io,chrisseto/osf.io,chennan47/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,laurenrevere/osf.io,leb2dg/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,mluo613/osf.io,leb2dg/osf.io,chennan47/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,hmoco/osf.io,mluo613/osf.io,binoculars/osf.io,mfraezz/osf.io,caseyrollins/osf.io,sloria/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,icereval/osf.io,caneruguz/osf.io,erinspace/osf.io,caneruguz/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,sloria/osf.io,saradbowman/osf.io,adlius/osf.io,acshi/osf.io,felliott/osf.io,TomBaxter/osf.io | ---
+++
@@ -13,7 +13,10 @@
monkey.patch_all()
# PATCH: avoid deadlock on getaddrinfo, this patch is necessary while waiting for
# the final gevent 1.1 release (https://github.com/gevent/gevent/issues/349)
- unicode('foo').encode('idna') # noqa
+ # unicode('foo').encode('idna') # noqa
+
+ from psycogreen.gevent import patch_psycopg # noqa
+ patch_psycopg()
import os # noqa |
9c2951d794bb27952606cae77da1ebcd0d651e72 | aiodownload/api.py | aiodownload/api.py | # -*- coding: utf-8 -*-
from aiodownload import AioDownloadBundle, AioDownload
import asyncio
def one(url, download=None):
return [s for s in swarm([url], download=download)][0]
def swarm(urls, download=None):
return [e for e in each(urls, download=download)]
def each(iterable, url_map=None, download=None):
url_map = url_map or _url_map
download = download or AioDownload()
tasks = []
for i in iterable:
url = url_map(i)
info = None if i == url else i
tasks.append(
download._loop.create_task(
AioDownload(url, info=info)
)
)
for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
for task in task_set:
yield task.result()
def _url_map(x):
return str(x)
| # -*- coding: utf-8 -*-
from aiodownload import AioDownloadBundle, AioDownload
import asyncio
def one(url, download=None):
    """Download a single URL and return its completed bundle."""
    results = list(swarm([url], download=download))
    return results[0]
def swarm(urls, download=None):
    """Download many URLs, returning their completed bundles as a list."""
    return list(each(urls, download=download))
def each(iterable, url_map=None, download=None):
    """Schedule a download task per item and yield the completed results.

    :param iterable: items to download; each is turned into a URL via
        ``url_map`` (default ``_url_map``, i.e. ``str``).
    :param url_map: callable mapping an item to its URL.
    :param download: ``AioDownload`` instance to use; one is created
        on demand when omitted.

    Results come from ``asyncio.wait`` result sets, so yield order is
    not guaranteed to match input order.
    """
    url_map = url_map or _url_map
    download = download or AioDownload()
    tasks = []
    for i in iterable:
        url = url_map(i)
        # Keep the original item as bundle info only when the mapping
        # actually transformed it.
        info = None if i == url else i
        tasks.append(
            download._loop.create_task(
                download.main(AioDownloadBundle(url, info=info))
            )
        )
    # asyncio.wait() returns (done, pending) sets; run_until_complete
    # drives all tasks to completion, so every task has a result here.
    for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
        for task in task_set:
            yield task.result()
def _url_map(x):
return str(x)
| Fix - needed to provide create_task a function, not a class | Fix - needed to provide create_task a function, not a class
| Python | mit | jelloslinger/aiodownload | ---
+++
@@ -25,7 +25,7 @@
info = None if i == url else i
tasks.append(
download._loop.create_task(
- AioDownload(url, info=info)
+ download.main(AioDownloadBundle(url, info=info))
)
)
|
633f84411e26201233e3c68c584b236363f79f62 | server/conf/vhosts/available/token.py | server/conf/vhosts/available/token.py | import core.provider.authentication as authentication
import core.notify.dispatcher as notify
import core.notify.plugins.available.changes as changes
import core.provider.storage as storage
import core.resource.base as resource
import conf.vhosts.available.default as default
class VHost(default.VHost):
host = ['localhost']
port = '8888'
def build(self):
"""
Build the resource tree
"""
default.VHost.build(self)
# [Change the notifiers] #
self.root.notifier = notify.Manual([changes.Plugin()])
# plain propfind xslt replaces the other one
xsltResource = resource.StaticResource(
storage.FileStorageProvider(self.config['basePath']+'/static/public/propfind-plain.xslt'),
expirationDays = 2
)
xsltResource.isLeaf = True
self.root.children['~static'].putChild('propfind.xslt', xsltResource)
tree = resource.TokenAccessResourceDecorator(self.root)
self.root = resource.TokenResource(
authProvider=authentication.TokenAuthProvider(secret=self.config['share']['secret'],
userProvider=self.user
),
tree=tree) | import core.provider.authentication as authentication
import core.notify.dispatcher as notify
import core.notify.plugins.available.changes as changes
import core.provider.storage as storage
import core.resource.base as resource
import conf.vhosts.available.default as default
class VHost(default.VHost):
host = ['localhost']
port = '8888'
def build(self):
"""
Build the resource tree
"""
default.VHost.build(self)
# [Change the notifiers] #
self.root.notifier = notify.Manual([changes.Plugin()])
# plain propfind xslt replaces the other one
xsltResource = resource.StaticResource(
storage.FileStorageProvider(self.config['basePath']+'/static/public/propfind-plain.xslt'),
expirationDays = 2
)
xsltResource.isLeaf = True
self.root.children['~static'].putChild('propfind.xslt', xsltResource)
leaves = {}
for path, child in self.root.children.items():
if child.isLeaf == True:
leaves[path] = child
tree = resource.TokenAccessResourceDecorator(self.root)
self.root = resource.TokenResource(
authProvider=authentication.TokenAuthProvider(secret=self.config['share']['secret'],
userProvider=self.user
),
tree=tree)
for path, child in leaves.items():
self.root.putChild(path, child) | Allow access to root leaves. | Allow access to root leaves.
| Python | mit | slaff/attachix,slaff/attachix,slaff/attachix | ---
+++
@@ -26,6 +26,11 @@
xsltResource.isLeaf = True
self.root.children['~static'].putChild('propfind.xslt', xsltResource)
+ leaves = {}
+ for path, child in self.root.children.items():
+ if child.isLeaf == True:
+ leaves[path] = child
+
tree = resource.TokenAccessResourceDecorator(self.root)
self.root = resource.TokenResource(
@@ -33,3 +38,6 @@
userProvider=self.user
),
tree=tree)
+
+ for path, child in leaves.items():
+ self.root.putChild(path, child) |
7da1326848cba8ff7bf61dde4583a12e5bad8ae2 | centinel/backend.py | centinel/backend.py | import requests
import config
def get_recommended_versions():
return request("/versions")
def get_experiments():
return request("/experiments")
def get_results():
return request("/results")
def get_clients():
return request("/clients")
def request(slug):
url = "%s%s" % (config.server_url, slug)
req = requests.get(url)
return req.json()
| import requests
import config
def request(slug):
    """GET ``config.server_url + slug`` and return the decoded JSON body.

    Raises ``requests.HTTPError`` (via ``raise_for_status``) when the
    server answers with a 4xx/5xx status code.
    """
    url = "%s%s" % (config.server_url, slug)
    req = requests.get(url)

    if req.status_code != requests.codes.ok:
        # BUG FIX: was ``raise req.raise_for_status()``.  raise_for_status()
        # itself raises for error codes and returns None otherwise, so the
        # old outer ``raise`` produced ``raise None`` (TypeError) for
        # non-error, non-200 responses.
        req.raise_for_status()

    return req.json()


def get_recommended_versions():
    """Return the recommended client versions."""
    return request("/versions")


def get_experiments():
    """Return the list of available experiments."""
    return request("/experiments")


def get_results():
    """Return collected results."""
    return request("/results")


def get_clients():
    """Return registered clients."""
    return request("/clients")

# BUG FIX: a second, check-less ``def request(slug)`` used to follow here,
# silently shadowing the checking version above and making its status-code
# handling dead code.  It has been removed.
| Raise exception if status code is not ok | Raise exception if status code is not ok
| Python | mit | JASONews/centinel,iclab/centinel,lianke123321/centinel,lianke123321/centinel,Ashish1805/centinel,rpanah/centinel,rpanah/centinel,ben-jones/centinel,iclab/centinel,lianke123321/centinel,iclab/centinel,rpanah/centinel | ---
+++
@@ -1,5 +1,14 @@
import requests
import config
+
+def request(slug):
+ url = "%s%s" % (config.server_url, slug)
+ req = requests.get(url)
+
+ if req.status_code != requests.codes.ok:
+ raise req.raise_for_status()
+
+ return req.json()
def get_recommended_versions():
return request("/versions") |
5225392a305e8e83a5a0fae91d3c2090914f2e5c | resolwe/flow/executors/docker.py | resolwe/flow/executors/docker.py | """Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
self.proc = subprocess.Popen(
shlex.split('docker run --rm --interactive --name={} {} /bin/bash'.format(self.data_id, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
| """Local workflow executor"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import random
import shlex
import subprocess
from django.conf import settings
from .local import FlowExecutor as LocalFlowExecutor
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
if self.data_id != 'no_data_id':
container_name = 'resolwe_{}'.format(self.data_id)
else:
# set random container name for tests
rand_int = random.randint(1000, 9999)
container_name = 'resolwe_test_{}'.format(rand_int)
self.proc = subprocess.Popen(
shlex.split(
'docker run --rm --interactive --name={} {} /bin/bash'.format(container_name, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout
def run_script(self, script):
self.proc.stdin.write(os.linesep.join(['set -x', 'set +B', script]) + os.linesep)
self.proc.stdin.close()
def end(self):
self.proc.wait()
return self.proc.returncode
def terminate(self, data_id):
subprocess.call(shlex.split('docker rm -f {}').format(str(data_id)))
| Set random container name for tests | Set random container name for tests
| Python | apache-2.0 | jberci/resolwe,jberci/resolwe,genialis/resolwe,genialis/resolwe | ---
+++
@@ -2,6 +2,7 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import os
+import random
import shlex
import subprocess
@@ -13,8 +14,17 @@
class FlowExecutor(LocalFlowExecutor):
def start(self):
container_image = settings.FLOW_EXECUTOR['CONTAINER_IMAGE']
+
+ if self.data_id != 'no_data_id':
+ container_name = 'resolwe_{}'.format(self.data_id)
+ else:
+ # set random container name for tests
+ rand_int = random.randint(1000, 9999)
+ container_name = 'resolwe_test_{}'.format(rand_int)
+
self.proc = subprocess.Popen(
- shlex.split('docker run --rm --interactive --name={} {} /bin/bash'.format(self.data_id, container_image)),
+ shlex.split(
+ 'docker run --rm --interactive --name={} {} /bin/bash'.format(container_name, container_image)),
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout = self.proc.stdout |
304a7da03072dbe6e099bbda37fb8aca0567e64b | organizer/models.py | organizer/models.py | from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField()
link = models.URLField()
startup = models.ForeignKey(Startup)
| from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField()
link = models.URLField()
startup = models.ForeignKey(Startup)
| Define Startup model related fields. | Ch03: Define Startup model related fields. [skip ci]
https://docs.djangoproject.com/en/1.8/ref/models/fields/#manytomanyfield
A many-to-many relationship allows for different models to be related
to many of the other. For instance, students may take many classes, and
classes are taught to many students. In our code, we allow for Startups
to be categorized by Tag objects. A Tag object may categorize many
Startup objects, and Startups may be categorized by many Tag objects.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | ---
+++
@@ -17,6 +17,7 @@
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
+ tags = models.ManyToManyField(Tag)
class NewsLink(models.Model): |
d0bea0fa49eb6c70f4c014d210fddf3a3a500ce6 | ci/testsettings.py | ci/testsettings.py | # This file is exec'd from settings.py, so it has access to and can
# modify all the variables in settings.py.
# If this file is changed in development, the development server will
# have to be manually restarted because changes will not be noticed
# immediately.
DEBUG = False
# disable compression for tests that check javascript contents
COMPRESS_ENABLED = False
# include database settings to use Mariadb ver on production (5.5)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'test',
'USER': 'root',
'HOST': 'localhost',
'PORT': '',
'TEST': {
'CHARSET': 'utf8',
'COLLATION': 'utf8_general_ci',
},
},
}
# must be defined for initial setup
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://127.0.0.1:8983/solr/test-derrida',
'ADMIN_URL': 'http://127.0.0.1:8983/solr/admin/cores',
}
}
# for unit tests, that swap test connection in for default
HAYSTACK_TEST_CONNECTIONS = HAYSTACK_CONNECTIONS
# secret key added as a travis build step
| # This file is exec'd from settings.py, so it has access to and can
# modify all the variables in settings.py.
# If this file is changed in development, the development server will
# have to be manually restarted because changes will not be noticed
# immediately.
DEBUG = False
# disable compression for tests that check javascript contents
COMPRESS_ENABLED = False
# include database settings to use Mariadb ver on production (5.5)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'test',
'USER': 'root',
'HOST': 'localhost',
'PORT': '',
'TEST': {
'CHARSET': 'utf8',
'COLLATION': 'utf8_general_ci',
},
},
}
# must be defined for initial setup
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'derrida.common.solr_backend.RangeSolrEngine',
'URL': 'http://127.0.0.1:8983/solr/test-derrida',
'ADMIN_URL': 'http://127.0.0.1:8983/solr/admin/cores',
}
}
# for unit tests, that swap test connection in for default
HAYSTACK_TEST_CONNECTIONS = HAYSTACK_CONNECTIONS
# secret key added as a travis build step
| Update travis-ci settings to use local backend with range queries | Update travis-ci settings to use local backend with range queries
| Python | apache-2.0 | Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django | ---
+++
@@ -29,7 +29,7 @@
# must be defined for initial setup
HAYSTACK_CONNECTIONS = {
'default': {
- 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
+ 'ENGINE': 'derrida.common.solr_backend.RangeSolrEngine',
'URL': 'http://127.0.0.1:8983/solr/test-derrida',
'ADMIN_URL': 'http://127.0.0.1:8983/solr/admin/cores',
} |
748e2e51a9ead7be4047e112aad2ed07a3d7a2c9 | systemvm/patches/debian/config/opt/cloud/bin/cs_dhcp.py | systemvm/patches/debian/config/opt/cloud/bin/cs_dhcp.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from pprint import pprint
from netaddr import *
def merge(dbag, data):
# A duplicate ip address wil clobber the old value
# This seems desirable ....
if "add" in data and data['add'] is False and \
"ipv4_adress" in data:
if data['ipv4_adress'] in dbag:
del(dbag[data['ipv4_adress']])
return dbag
else:
dbag[data['ipv4_adress']] = data
return dbag
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from pprint import pprint
from netaddr import *
def merge(dbag, data):
search(dbag, data['host_name'])
# A duplicate ip address wil clobber the old value
# This seems desirable ....
if "add" in data and data['add'] is False and \
"ipv4_adress" in data:
if data['ipv4_adress'] in dbag:
del(dbag[data['ipv4_adress']])
return dbag
else:
dbag[data['ipv4_adress']] = data
return dbag
def search(dbag, name):
"""
Dirty hack because CS does not deprovision hosts
"""
hosts = []
for o in dbag:
if o == 'id':
continue
print "%s %s" % (dbag[o]['host_name'], name)
if dbag[o]['host_name'] == name:
hosts.append(o)
for o in hosts:
del(dbag[o])
| Check both ip and hosts when building dhcp bag | Check both ip and hosts when building dhcp bag
| Python | apache-2.0 | GabrielBrascher/cloudstack,resmo/cloudstack,resmo/cloudstack,resmo/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,wido/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,jcshen007/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack | ---
+++
@@ -21,6 +21,7 @@
def merge(dbag, data):
+ search(dbag, data['host_name'])
# A duplicate ip address wil clobber the old value
# This seems desirable ....
if "add" in data and data['add'] is False and \
@@ -31,3 +32,18 @@
else:
dbag[data['ipv4_adress']] = data
return dbag
+
+
+def search(dbag, name):
+ """
+ Dirty hack because CS does not deprovision hosts
+ """
+ hosts = []
+ for o in dbag:
+ if o == 'id':
+ continue
+ print "%s %s" % (dbag[o]['host_name'], name)
+ if dbag[o]['host_name'] == name:
+ hosts.append(o)
+ for o in hosts:
+ del(dbag[o]) |
855724c4e52a55d141e2ef72cf7181710fb33d44 | dwitter/user/urls.py | dwitter/user/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>[a-z0-9]*)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>[a-z0-9]*)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>\w+)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>\w+)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
| Fix error when usernames have capital characters | Fix error when usernames have capital characters
| Python | apache-2.0 | lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter | ---
+++
@@ -3,6 +3,6 @@
from . import views
urlpatterns = [
- url(r'^(?P<url_username>[a-z0-9]*)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
- url(r'^(?P<url_username>[a-z0-9]*)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
+ url(r'^(?P<url_username>\w+)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
+ url(r'^(?P<url_username>\w+)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
] |
fccc3be65ee5b82c8ac4f4810193ecaa7c6aed6c | backend/unichat/helpers.py | backend/unichat/helpers.py | from .models import School, User
import re
from django.contrib.auth.models import User as Django_User
from django.contrib.auth.hashers import make_password
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = Django_User.objects.make_random_password()
db_password = make_password(password)
user_obj = User(email=email, school=school, password=db_password)
user_obj.save()
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
# TODO: Send signup mail to user
| from .models import School, User
import re
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
user_obj = User.objects.create_user(email=email, school=school, password=password)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
# TODO: Send signup mail to user
| Change create_user helper with new User model | Change create_user helper with new User model
| Python | mit | dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet | ---
+++
@@ -1,7 +1,5 @@
from .models import School, User
import re
-from django.contrib.auth.models import User as Django_User
-from django.contrib.auth.hashers import make_password
def get_school_list():
@@ -27,9 +25,8 @@
def create_user(email, school):
- password = Django_User.objects.make_random_password()
- db_password = make_password(password)
- user_obj = User(email=email, school=school, password=db_password)
+ password = User.objects.make_random_password()
+ user_obj = User.objects.create_user(email=email, school=school, password=password)
+ user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
- user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
# TODO: Send signup mail to user |
2a72b26f63c81e3ceb64d8fd920f3a1327aa0e13 | cookiecutter/vcs.py | cookiecutter/vcs.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.vcs
----------------
Helper functions for working with version control systems.
"""
import logging
import os
import shutil
import subprocess
import sys
from .prompt import query_yes_no
def git_clone(repo, checkout=None):
"""
Clone a git repo to the current directory.
:param repo: Git repo URL ending with .git.
:param checkout: The branch, tag or commit ID to checkout after clone
"""
# Return repo dir
tail = os.path.split(repo)[1]
repo_dir = tail.rsplit('.git')[0]
logging.debug('repo_dir is {0}'.format(repo_dir))
if os.path.isdir(repo_dir):
ok_to_delete = query_yes_no(
"You've cloned {0} before. Is it okay to delete and re-clone it?".format(repo_dir),
default="yes"
)
if ok_to_delete:
shutil.rmtree(repo_dir)
else:
sys.exit()
os.system('git clone {0}'.format(repo))
if checkout is not None:
subprocess.check_call(['git', 'checkout', checkout], cwd=repo_dir)
return repo_dir
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.vcs
----------------
Helper functions for working with version control systems.
"""
import logging
import os
import shutil
import subprocess
import sys
from .prompt import query_yes_no
def git_clone(repo, checkout=None):
"""
Clone a git repo to the current directory.
:param repo: Git repo URL ending with .git.
:param checkout: The branch, tag or commit ID to checkout after clone
"""
# Return repo dir
tail = os.path.split(repo)[1]
repo_dir = tail.rsplit('.git')[0]
logging.debug('repo_dir is {0}'.format(repo_dir))
if os.path.isdir(repo_dir):
ok_to_delete = query_yes_no(
"You've cloned {0} before. Is it okay to delete and re-clone it?".format(repo_dir),
default="yes"
)
if ok_to_delete:
shutil.rmtree(repo_dir)
else:
sys.exit()
subprocess.check_call(['git', 'clone', repo], cwd='.')
if checkout is not None:
subprocess.check_call(['git', 'checkout', checkout], cwd=repo_dir)
return repo_dir
| Use subprocess instead of os.system to git clone. | Use subprocess instead of os.system to git clone.
| Python | bsd-3-clause | atlassian/cookiecutter,benthomasson/cookiecutter,luzfcb/cookiecutter,drgarcia1986/cookiecutter,moi65/cookiecutter,alex/cookiecutter,tylerdave/cookiecutter,drgarcia1986/cookiecutter,Vauxoo/cookiecutter,cguardia/cookiecutter,vintasoftware/cookiecutter,cichm/cookiecutter,agconti/cookiecutter,lgp171188/cookiecutter,nhomar/cookiecutter,vintasoftware/cookiecutter,letolab/cookiecutter,tylerdave/cookiecutter,stevepiercy/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,kkujawinski/cookiecutter,stevepiercy/cookiecutter,sp1rs/cookiecutter,agconti/cookiecutter,venumech/cookiecutter,janusnic/cookiecutter,dajose/cookiecutter,christabor/cookiecutter,christabor/cookiecutter,hackebrot/cookiecutter,nhomar/cookiecutter,letolab/cookiecutter,benthomasson/cookiecutter,willingc/cookiecutter,moi65/cookiecutter,terryjbates/cookiecutter,pjbull/cookiecutter,pjbull/cookiecutter,terryjbates/cookiecutter,0k/cookiecutter,hackebrot/cookiecutter,jhermann/cookiecutter,michaeljoseph/cookiecutter,ionelmc/cookiecutter,cguardia/cookiecutter,sp1rs/cookiecutter,cichm/cookiecutter,Vauxoo/cookiecutter,audreyr/cookiecutter,foodszhang/cookiecutter,alex/cookiecutter,atlassian/cookiecutter,utek/cookiecutter,vincentbernat/cookiecutter,jhermann/cookiecutter,ramiroluz/cookiecutter,dajose/cookiecutter,lucius-feng/cookiecutter,lucius-feng/cookiecutter,utek/cookiecutter,janusnic/cookiecutter,michaeljoseph/cookiecutter,foodszhang/cookiecutter,ramiroluz/cookiecutter,0k/cookiecutter,lgp171188/cookiecutter,vincentbernat/cookiecutter,Springerle/cookiecutter,takeflight/cookiecutter,audreyr/cookiecutter,Springerle/cookiecutter,kkujawinski/cookiecutter,luzfcb/cookiecutter,ionelmc/cookiecutter,venumech/cookiecutter | ---
+++
@@ -40,7 +40,7 @@
else:
sys.exit()
- os.system('git clone {0}'.format(repo))
+ subprocess.check_call(['git', 'clone', repo], cwd='.')
if checkout is not None:
subprocess.check_call(['git', 'checkout', checkout], cwd=repo_dir) |
93a7616d949494888f5357f5491aa3278e7de234 | cupy/logic/truth.py | cupy/logic/truth.py | import cupy
def all(a, axis=None, out=None, keepdims=False):
assert isinstance(a, cupy.ndarray)
return a.all(axis=axis, out=out, keepdims=keepdims)
def any(a, axis=None, out=None, keepdims=False):
assert isinstance(a, cupy.ndarray)
return a.any(axis=axis, out=out, keepdims=keepdims)
| import cupy
def all(a, axis=None, out=None, keepdims=False):
"""Tests whether all array elements along a given axis evaluate to True.
Args:
a (cupy.ndarray): Input array.
axis (int or tuple of ints): Along which axis to compute all.
The flattened array is used by default.
out (cupy.ndarray): Output array.
keepdims (bool): If ``True``, the axis is remained as an axis of
size one.
Returns:
cupy.ndarray: An array reduced of the input array along the axis.
.. seealso:: :data:`numpy.all`
"""
assert isinstance(a, cupy.ndarray)
return a.all(axis=axis, out=out, keepdims=keepdims)
def any(a, axis=None, out=None, keepdims=False):
"""Tests whether any array elements along a given axis evaluate to True.
Args:
a (cupy.ndarray): Input array.
axis (int or tuple of ints): Along which axis to compute all.
The flattened array is used by default.
out (cupy.ndarray): Output array.
keepdims (bool): If ``True``, the axis is remained as an axis of
size one.
Returns:
cupy.ndarray: An array reduced of the input array along the axis.
.. seealso:: :data:`numpy.any`
"""
assert isinstance(a, cupy.ndarray)
return a.any(axis=axis, out=out, keepdims=keepdims)
| Add documents of cupy.all and cupy.any function | Add documents of cupy.all and cupy.any function
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | ---
+++
@@ -2,10 +2,42 @@
def all(a, axis=None, out=None, keepdims=False):
+ """Tests whether all array elements along a given axis evaluate to True.
+
+ Args:
+ a (cupy.ndarray): Input array.
+ axis (int or tuple of ints): Along which axis to compute all.
+ The flattened array is used by default.
+ out (cupy.ndarray): Output array.
+ keepdims (bool): If ``True``, the axis is remained as an axis of
+ size one.
+
+ Returns:
+ cupy.ndarray: An array reduced of the input array along the axis.
+
+ .. seealso:: :data:`numpy.all`
+
+ """
assert isinstance(a, cupy.ndarray)
return a.all(axis=axis, out=out, keepdims=keepdims)
def any(a, axis=None, out=None, keepdims=False):
+ """Tests whether any array elements along a given axis evaluate to True.
+
+ Args:
+ a (cupy.ndarray): Input array.
+ axis (int or tuple of ints): Along which axis to compute all.
+ The flattened array is used by default.
+ out (cupy.ndarray): Output array.
+ keepdims (bool): If ``True``, the axis is remained as an axis of
+ size one.
+
+ Returns:
+ cupy.ndarray: An array reduced of the input array along the axis.
+
+ .. seealso:: :data:`numpy.any`
+
+ """
assert isinstance(a, cupy.ndarray)
return a.any(axis=axis, out=out, keepdims=keepdims) |
4785a5e8d639dea1a9cf767d2c77f6bd9dbe2433 | leapp/cli/upgrade/__init__.py | leapp/cli/upgrade/__init__.py | from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run()
| from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
manager = load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run()
| Add back missing manager creation | leapp: Add back missing manager creation
| Python | lgpl-2.1 | leapp-to/prototype,vinzenz/prototype,leapp-to/prototype,vinzenz/prototype,vinzenz/prototype,leapp-to/prototype,vinzenz/prototype,leapp-to/prototype | ---
+++
@@ -11,7 +11,7 @@
def load_repositories():
- load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
+ manager = load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
|
8993ad7d2b15d05e26788c9bf39ed81794f724dc | branchconfig.py | branchconfig.py | #!/usr/bin/env python
import argparse
from git import Repo
import configprocessor
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-p", "--path", help="Path to git working copy")
parser.add_argument("-c", "--configfile", help="Configuration file to parse")
parser.add_argument("-o", "--outputfile")
args = parser.parse_args()
branch = getCurrentBranch(args.path)
configprocessor.run(args.configfile, args.outputfile, branch)
def getCurrentBranch(path):
repo = Repo(path)
branch = repo.active_branch
return branch
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import argparse
from git import Repo
import configprocessor
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-p", "--path", help="Path to git working copy")
parser.add_argument("-c", "--configfile", help="Configuration file to parse")
parser.add_argument("-o", "--outputfile")
args = parser.parse_args()
branch = getCurrentBranch(args.path)
configprocessor.run(args.configfile, args.outputfile, str(branch))
def getCurrentBranch(path):
repo = Repo(path)
branch = repo.active_branch
return branch
if __name__ == "__main__":
main()
| Convert branch to string explicitly | Convert branch to string explicitly
| Python | apache-2.0 | igoris/branch-config | ---
+++
@@ -13,7 +13,7 @@
args = parser.parse_args()
branch = getCurrentBranch(args.path)
- configprocessor.run(args.configfile, args.outputfile, branch)
+ configprocessor.run(args.configfile, args.outputfile, str(branch))
def getCurrentBranch(path):
repo = Repo(path) |
6e1a211ff1834f8047261d51737afcb0412075b5 | memleak.py | memleak.py | import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
| import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
if i % 25 == 0:
logging.info(util.nvidia_smi())
| Add logging of memory use | Add logging of memory use
| Python | mit | MaestroGraph/sparse-hyper | ---
+++
@@ -27,3 +27,7 @@
x, _ = hyper.flatten_indices(x, IN, OUT)
+ if i % 25 == 0:
+ logging.info(util.nvidia_smi())
+
+ |
badfa5c7c0572e36a94598ec6cc8a845e453d233 | chipy_org/settings_test.py | chipy_org/settings_test.py | # pylint: disable=unused-wildcard-import,wildcard-import
from .settings import *
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", "TEST": {}}}
DEBUG = True
ADMINS = ["admin@chipy.org"]
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
ENVELOPE_EMAIL_RECIPIENTS = [
"admin@example.com",
]
SECRET_KEY = "somesecretkeyfordjangogoeshere"
SECURE_SSL_REDIRECT = False
NORECAPTCHA_SITE_KEY = "6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI"
NORECAPTCHA_SECRET_KEY = "6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe"
| # pylint: disable=unused-wildcard-import,wildcard-import
from .settings import *
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", "TEST": {}}}
DEBUG = True
ADMINS = ["admin@chipy.org"]
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
ENVELOPE_EMAIL_RECIPIENTS = [
"admin@example.com",
]
INSTALLED_APPS.append("chipy_org.dev_utils")
SECRET_KEY = "somesecretkeyfordjangogoeshere"
SECURE_SSL_REDIRECT = False
NORECAPTCHA_SITE_KEY = "6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI"
NORECAPTCHA_SECRET_KEY = "6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe"
| Add dev_utils to test settings | Add dev_utils to test settings
| Python | mit | chicagopython/chipy.org,chicagopython/chipy.org,chicagopython/chipy.org,chicagopython/chipy.org | ---
+++
@@ -9,6 +9,8 @@
"admin@example.com",
]
+INSTALLED_APPS.append("chipy_org.dev_utils")
+
SECRET_KEY = "somesecretkeyfordjangogoeshere"
SECURE_SSL_REDIRECT = False
|
c1ab2795152d50de9f535e90b550d033feadf778 | tests/test_storage.py | tests/test_storage.py | import json
import os
from mock import patch
import pytest
from inmemorystorage.storage import InMemoryStorage
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'images')
@pytest.mark.django_db
def test_alternate_storage(admin_client, settings):
"""Verify can plugin alternate storage backend"""
settings.BETTY_IMAGE_ROOT = 'images'
storage = InMemoryStorage()
with patch('django.db.models.fields.files.default_storage._wrapped', storage):
# Create Image
path = os.path.join(TEST_DATA_PATH, 'lenna.png')
with open(path, "rb") as image:
resp = admin_client.post('/images/api/new', {"image": image})
assert resp.status_code == 200
image_id = json.loads(resp.content.decode("utf-8"))['id']
image.seek(0)
image_data = image.read()
storage_data = storage.filesystem.open('images/{}/lenna.png'.format(image_id)).read()
assert image_data == storage_data
assert storage.filesystem.exists('images/{}/optimized.png'.format(image_id))
# Delete Image
resp = admin_client.post("/images/api/{0}".format(image_id),
REQUEST_METHOD="DELETE")
assert not storage.filesystem.exists('images/{}/lenna.png'.format(image_id))
assert not storage.filesystem.exists('images/{}/optimized.png'.format(image_id))
| import json
import os
from mock import patch
import pytest
from inmemorystorage.storage import InMemoryStorage
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'images')
@pytest.mark.django_db
def test_alternate_storage(admin_client, settings):
"""Verify can plugin alternate storage backend"""
settings.BETTY_IMAGE_ROOT = 'images'
storage = InMemoryStorage()
with patch('django.db.models.fields.files.default_storage._wrapped', storage):
# Create Image
path = os.path.join(TEST_DATA_PATH, 'Lenna.png')
with open(path, "rb") as image:
resp = admin_client.post('/images/api/new', {"image": image})
assert resp.status_code == 200
image_id = json.loads(resp.content.decode("utf-8"))['id']
image.seek(0)
image_data = image.read()
storage_data = storage.filesystem.open('images/{}/Lenna.png'.format(image_id)).read()
assert image_data == storage_data
assert storage.filesystem.exists('images/{}/optimized.png'.format(image_id))
# Delete Image
resp = admin_client.post("/images/api/{0}".format(image_id),
REQUEST_METHOD="DELETE")
assert not storage.filesystem.exists('images/{}/Lenna.png'.format(image_id))
assert not storage.filesystem.exists('images/{}/optimized.png'.format(image_id))
| Fix case-sensitive filename in storage test | Fix case-sensitive filename in storage test
| Python | mit | theonion/betty-cropper,theonion/betty-cropper,theonion/betty-cropper,theonion/betty-cropper | ---
+++
@@ -19,7 +19,7 @@
with patch('django.db.models.fields.files.default_storage._wrapped', storage):
# Create Image
- path = os.path.join(TEST_DATA_PATH, 'lenna.png')
+ path = os.path.join(TEST_DATA_PATH, 'Lenna.png')
with open(path, "rb") as image:
resp = admin_client.post('/images/api/new', {"image": image})
@@ -28,7 +28,7 @@
image.seek(0)
image_data = image.read()
- storage_data = storage.filesystem.open('images/{}/lenna.png'.format(image_id)).read()
+ storage_data = storage.filesystem.open('images/{}/Lenna.png'.format(image_id)).read()
assert image_data == storage_data
assert storage.filesystem.exists('images/{}/optimized.png'.format(image_id))
@@ -36,5 +36,5 @@
resp = admin_client.post("/images/api/{0}".format(image_id),
REQUEST_METHOD="DELETE")
- assert not storage.filesystem.exists('images/{}/lenna.png'.format(image_id))
+ assert not storage.filesystem.exists('images/{}/Lenna.png'.format(image_id))
assert not storage.filesystem.exists('images/{}/optimized.png'.format(image_id)) |
95d71d5a84f05de7d655fd788a4139c3a1316d74 | text/__init__.py | text/__init__.py | #! /usr/bin/env python
import os
def get_files(path, ext=None):
"""
Get all files in directory path, optionally with the specified extension
"""
if ext is None:
ext = ''
return [
os.path.abspath(fname)
for fname in os.listdir(path)
if os.path.isfile(fname)
if fname.endswith(ext)
]
| #! /usr/bin/env python
import os
def get_files(path, ext=None):
"""
Get all files in directory path, optionally with the specified extension
"""
if ext is None:
ext = ''
return [
os.path.abspath(fname)
for fname in os.listdir(path)
if os.path.isfile(fname)
if fname.endswith(ext)
]
def blob_text(filenames):
"""
Create a blob of text by reading in all filenames into a string
"""
return '\n'.join([open(fname).read() for fname in filenames])
| Add function to generate a blob of text from a list of files | Add function to generate a blob of text from a list of files
| Python | mit | IanLee1521/utilities | ---
+++
@@ -16,3 +16,10 @@
if os.path.isfile(fname)
if fname.endswith(ext)
]
+
+
+def blob_text(filenames):
+ """
+ Create a blob of text by reading in all filenames into a string
+ """
+ return '\n'.join([open(fname).read() for fname in filenames]) |
9891fa25d905bb2aa34c9c55fc420d29438f9499 | leonardo_ckeditor/__init__.py | leonardo_ckeditor/__init__.py |
from django.apps import AppConfig
from .ckeditor_config import DEFAULT_CONFIG
default_app_config = 'leonardo_ckeditor.Config'
LEONARDO_APPS = [
'leonardo_ckeditor',
'ckeditor',
'ckeditor_uploader'
]
LEONARDO_CONFIG = {
'CKEDITOR_UPLOAD_PATH': ('uploads/', ('CKEditor upload directory')),
'CKEDITOR_CONFIGS': ({'default': DEFAULT_CONFIG}, 'ckeditor config')
}
LEONARDO_OPTGROUP = 'CKEditor'
LEONARDO_JS_FILES = [
"leonardo_ckeditor/js/ckeditor-modal-init.js",
]
LEONARDO_CSS_FILES = [
'ckeditor/ckeditor/skins/moono/editor.css'
]
LEONARDO_PUBLIC = True
LEONARDO_URLS_CONF = 'ckeditor_uploader.urls'
class Config(AppConfig):
name = 'leonardo_ckeditor'
verbose_name = "leonardo-ckeditor"
def ready(self):
from ckeditor.widgets import CKEditorWidget
from leonardo.module.web.widget.htmltext import models
models.HtmlTextWidget.widgets['text'] = CKEditorWidget()
|
from django.apps import AppConfig
from .ckeditor_config import DEFAULT_CONFIG
default_app_config = 'leonardo_ckeditor.Config'
LEONARDO_APPS = [
'leonardo_ckeditor',
'ckeditor',
'ckeditor_uploader'
]
LEONARDO_CONFIG = {
'CKEDITOR_UPLOAD_PATH': ('', ('CKEditor upload directory')),
'CKEDITOR_CONFIGS': ({'default': DEFAULT_CONFIG}, 'ckeditor config')
}
LEONARDO_OPTGROUP = 'CKEditor'
LEONARDO_JS_FILES = [
"leonardo_ckeditor/js/ckeditor-modal-init.js",
]
LEONARDO_CSS_FILES = [
'ckeditor/ckeditor/skins/moono/editor.css'
]
LEONARDO_PUBLIC = True
LEONARDO_URLS_CONF = 'ckeditor_uploader.urls'
class Config(AppConfig):
name = 'leonardo_ckeditor'
verbose_name = "leonardo-ckeditor"
def ready(self):
from ckeditor_uploader.widgets import CKEditorUploadingWidget
from leonardo.module.web.widget.htmltext import models
models.HtmlTextWidget.widgets['text'] = CKEditorUploadingWidget()
| Use upload widget in the default state. | Use upload widget in the default state.
| Python | bsd-3-clause | leonardo-modules/leonardo-ckeditor,leonardo-modules/leonardo-ckeditor | ---
+++
@@ -12,7 +12,7 @@
]
LEONARDO_CONFIG = {
- 'CKEDITOR_UPLOAD_PATH': ('uploads/', ('CKEditor upload directory')),
+ 'CKEDITOR_UPLOAD_PATH': ('', ('CKEditor upload directory')),
'CKEDITOR_CONFIGS': ({'default': DEFAULT_CONFIG}, 'ckeditor config')
}
LEONARDO_OPTGROUP = 'CKEditor'
@@ -33,6 +33,6 @@
verbose_name = "leonardo-ckeditor"
def ready(self):
- from ckeditor.widgets import CKEditorWidget
+ from ckeditor_uploader.widgets import CKEditorUploadingWidget
from leonardo.module.web.widget.htmltext import models
- models.HtmlTextWidget.widgets['text'] = CKEditorWidget()
+ models.HtmlTextWidget.widgets['text'] = CKEditorUploadingWidget() |
038ea41054c3b7d07a7297d392e5e5d5f9c59d6a | logout.py | logout.py | #!/usr/local/bin/python3
# ^^^ this is bad practice, DON'T do as I did!
import cgitb # debugging
import footer
import header
from htmlify import *
print("Content-Type: text/html;charset=utf-8\n")
cgitb.enable() # enable debugging
header.showHeader()
# content
startTag("div", id="container") # start container
dispHTML("h3", contents="Logout")
startTag("form", id="login-form", method="POST", action="/cgi-bin/ic/main.py") # login form
dispHTML("button", contents="To log out, click here.", name="logout")
endTag("form") # end login form
endTag("div") # end containter
# footer
footer.showFooter()
| #!/usr/local/bin/python3
# ^^^ this is bad practice, DON'T do as I did!
import cgitb # debugging
import footer
import header
from htmlify import *
print("Content-Type: text/html;charset=utf-8\n")
print('<meta http-equiv="set-cookie" content="password="";>')
cgitb.enable() # enable debugging
header.showHeader()
# content
startTag("div", id="container") # start container
dispHTML("h3", contents="Logout")
dispHTML("p", contents="You have been logged out.")
startTag("p")
dispHTML("a", href="/cgi-bin/ic/main.py", contents="Return Home")
endTag("div") # end container
# footer
footer.showFooter()
| Fix inability to log out | Fix inability to log out
| Python | apache-2.0 | ISD-Sound-and-Lights/InventoryControl | ---
+++
@@ -7,16 +7,17 @@
print("Content-Type: text/html;charset=utf-8\n")
+print('<meta http-equiv="set-cookie" content="password="";>')
cgitb.enable() # enable debugging
header.showHeader()
# content
startTag("div", id="container") # start container
dispHTML("h3", contents="Logout")
-startTag("form", id="login-form", method="POST", action="/cgi-bin/ic/main.py") # login form
-dispHTML("button", contents="To log out, click here.", name="logout")
-endTag("form") # end login form
-endTag("div") # end containter
+dispHTML("p", contents="You have been logged out.")
+startTag("p")
+dispHTML("a", href="/cgi-bin/ic/main.py", contents="Return Home")
+endTag("div") # end container
# footer |
532bebd01822917f89ec18080bf8e5a75c16832d | config_template.py | config_template.py | chatbot_ubuntu = {
'path': '',
'model_id': '',
'python_env': ''
}
chatbot_swisscom = {
'path': '',
'model_id': '',
'python_env': ''
}
chatbot_ubuntu_seq2seq = {
'socket_address': '',
'socket_port': ''
}
ate = {
'path': '',
'python_env': ''
}
neuroate = {
'path': '',
'python_env': ''
}
ner = {
'path': '',
'python_env': ''
}
kpextract = {
'path': '',
'fetcher_path': '',
'python_env': ''
}
neural_programmer = {
'socket_address': '',
'socket_port': '',
'mongo': False,
'mongo_address': '',
'mongo_port': '',
'mongo_db': '',
'mongo_feedback_coll': '',
'mongo_use_coll': ''
}
gsw_translator = {
'pbsmt_only_url': '',
'pbsmt_phono_url': '',
'pbsmt_ortho_url': '',
'pbsmt_cbnmt_url': ''
}
machine_translation_stdlangs = {
'base_url': ''
} | chatbot_ubuntu = {
'path': '',
'model_id': '',
'python_env': ''
}
chatbot_swisscom = {
'path': '',
'model_id': '',
'python_env': ''
}
chatbot_ubuntu_seq2seq = {
'socket_address': '',
'socket_port': ''
}
ate = {
'path': '',
'python_env': ''
}
neuroate = {
'path': '',
'python_env': ''
}
ner = {
'path': '',
'python_env': ''
}
kpextract = {
'path': '',
'fetcher_path': '',
'python_env': ''
}
neural_programmer = {
'socket_address': '',
'socket_port': '',
'mongo': False,
'mongo_address': '',
'mongo_port': '',
'mongo_db': '',
'mongo_feedback_coll': '',
'mongo_use_coll': ''
}
gsw_translator = {
'pbsmt_only_url': '',
'pbsmt_phono_url': '',
'pbsmt_ortho_url': '',
'pbsmt_cbnmt_url': ''
}
machine_translation_stdlangs = {
'base_url': ''
}
churn = {
'path' : '',
'python_env': '',
'e_host':'',
'e_port':
} | Add churn entry in the file | Add churn entry in the file
| Python | mit | nachoaguadoc/aimlx-demos,nachoaguadoc/aimlx-demos,nachoaguadoc/aimlx-demos | ---
+++
@@ -48,3 +48,9 @@
machine_translation_stdlangs = {
'base_url': ''
}
+churn = {
+ 'path' : '',
+ 'python_env': '',
+ 'e_host':'',
+ 'e_port':
+} |
e4452ff7e8c27e2e8315c2edb8627a2e92ca86e3 | panoptes_cli/scripts/panoptes.py | panoptes_cli/scripts/panoptes.py | import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.join(os.environ['HOME'], '.panoptes')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
| import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
| Use os.path.expanduser to find config directory | Use os.path.expanduser to find config directory
Works on Windows and Unix.
| Python | apache-2.0 | zooniverse/panoptes-cli | ---
+++
@@ -9,7 +9,7 @@
)
@click.pass_context
def cli(ctx, endpoint):
- ctx.config_dir = os.path.join(os.environ['HOME'], '.panoptes')
+ ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org', |
57a477985f3591258dee8a5cbf4ba2a173c749fc | dashboard/views.py | dashboard/views.py | from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from actstream.models import Action
from teams.models import Team
@login_required
def index(request):
template_name = 'dashboard/index.html'
# Chain together queries to find all team activities.
q = None
team_ct = ContentType.objects.get(app_label='teams', model='team')
for team in request.user.team_set.all():
subq = Q(target_content_type=team_ct) & Q(target_object_id=team.id)
if q is None:
q = subq
else:
q |= subq
team_actions = Action.objects.filter(q).order_by('-timestamp')
template_context = {
'team_actions': team_actions,
}
return render_to_response(
template_name,
template_context,
RequestContext(request),
)
| from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from actstream.models import Action
from teams.models import Team
@login_required
def index(request):
template_name = 'dashboard/index.html'
# Chain together queries to find all team activities.
q = None
team_ct = ContentType.objects.get(app_label='teams', model='team')
for team in request.user.team_set.all():
subq = Q(target_content_type=team_ct) & Q(target_object_id=team.id)
if q is None:
q = subq
else:
q |= subq
if q is not None:
team_actions = Action.objects.filter(q).order_by('-timestamp')
else:
team_actions = []
template_context = {
'team_actions': team_actions,
}
return render_to_response(
template_name,
template_context,
RequestContext(request),
)
| Fix dashboard bug when user is not in any teams. | Fix dashboard bug when user is not in any teams. | Python | apache-2.0 | snswa/swsites,snswa/swsites,snswa/swsites | ---
+++
@@ -21,7 +21,10 @@
q = subq
else:
q |= subq
- team_actions = Action.objects.filter(q).order_by('-timestamp')
+ if q is not None:
+ team_actions = Action.objects.filter(q).order_by('-timestamp')
+ else:
+ team_actions = []
template_context = {
'team_actions': team_actions,
} |
7ebd46c4a698b642dd1da355e413eb1bc9ce2727 | learntris.py | learntris.py | #!/usr/bin/env python
import sys
class Grid(object):
def __init__(self):
self.board = [[None] * 10 for i in range(22)]
self.score = 0
self.lines_clear = 0
def draw_board(self):
current_board = self.board
for row in current_board:
row = map(lambda cell: '.' if cell == None else y, row)
print ' '.join(row)
def given(self):
self.board = []
for row in range(0,22):
self.board.append(raw_input())
def clear(self):
self.board = ['. '*10 for row in range(0,22)]
def show_score(self):
print self.score
def show_clear_lines(self):
print self.lines_clear
def main():
grid = Grid()
commands = {'p': grid.draw_board,
'g': grid.given,
'c': grid.clear,
'?s': grid.show_score,
'?n': grid.show_clear_lines}
while True:
command = raw_input()
if command == 'q':
break
commands[command]()
if __name__ == '__main__':
main() | #!/usr/bin/env python
import sys
class Grid(object):
def __init__(self):
self.board = [[None] * 10 for i in range(22)]
self.score = 0
self.lines_clear = 0
def draw_board(self):
current_board = self.board
for row in current_board:
row = map(lambda cell: '.' if cell == None else cell, row)
print ' '.join(row)
def given(self):
for index, row in enumerate(self.board):
self.board[index] = [None if cell == '.' else cell for cell in raw_input() if cell != ' ']
def clear(self):
self.board = [[None] * 10 for i in range(22)]
def show_score(self):
print self.score
def show_clear_lines(self):
print self.lines_clear
def step(self):
for index, row in enumerate(self.board):
if all(row) and row[0] != None:
self.board[index] = [None] * 10
self.score += 100
self.lines_clear += 1
def main():
grid = Grid()
commands = {'p' : grid.draw_board,
'g' : grid.given,
'c' : grid.clear,
'?s': grid.show_score,
'?n': grid.show_clear_lines,
's' : grid.step}
while True:
command = raw_input()
if command == 'q':
break
commands[command]()
if __name__ == '__main__':
main() | Update rest of functions with new data structure; Passes Test 8 | Update rest of functions with new data structure; Passes Test 8
| Python | mit | mosegontar/learntris | ---
+++
@@ -14,16 +14,16 @@
current_board = self.board
for row in current_board:
- row = map(lambda cell: '.' if cell == None else y, row)
+ row = map(lambda cell: '.' if cell == None else cell, row)
print ' '.join(row)
def given(self):
- self.board = []
- for row in range(0,22):
- self.board.append(raw_input())
+
+ for index, row in enumerate(self.board):
+ self.board[index] = [None if cell == '.' else cell for cell in raw_input() if cell != ' ']
def clear(self):
- self.board = ['. '*10 for row in range(0,22)]
+ self.board = [[None] * 10 for i in range(22)]
def show_score(self):
print self.score
@@ -31,15 +31,25 @@
def show_clear_lines(self):
print self.lines_clear
+ def step(self):
+ for index, row in enumerate(self.board):
+ if all(row) and row[0] != None:
+ self.board[index] = [None] * 10
+ self.score += 100
+ self.lines_clear += 1
+
+
+
def main():
grid = Grid()
- commands = {'p': grid.draw_board,
- 'g': grid.given,
- 'c': grid.clear,
+ commands = {'p' : grid.draw_board,
+ 'g' : grid.given,
+ 'c' : grid.clear,
'?s': grid.show_score,
- '?n': grid.show_clear_lines}
+ '?n': grid.show_clear_lines,
+ 's' : grid.step}
while True:
command = raw_input() |
604102af89c0f3ab3b9562b3baa246f24de3fe90 | locations/spiders/la_salsa.py | locations/spiders/la_salsa.py | # -*- coding: utf-8 -*-
import scrapy
from locations.items import GeojsonPointItem
import json
import re
class LaSalsaSpider(scrapy.Spider):
name = "la_salsa"
allowed_domains = ["www.lasalsa.com"]
start_urls = (
'http://lasalsa.com/wp-content/themes/lasalsa-main/locations-search.php?lat=0&lng=0&radius=99999999',
)
def parse(self, response):
restaurantData = response.xpath("//markers").extract_first()
matches = re.finditer("<marker [\S\s]+?\"\/>", restaurantData)
for match in matches:
matchString = match.group(0)
fullAddress=re.findall("address=\"(.*?)\"", matchString)[0].replace('<br />', ',')
#Accounts for cases with second address line
yield GeojsonPointItem(
ref=re.findall("name=\"(.*?)\"", matchString)[0].strip(),
lat=re.findall("latitude=\"(.*?)\"", matchString)[0].strip(),
lon=re.findall("longitude=\"(.*?)\"", matchString)[0].strip(),
addr_full=re.findall("address=\"(.*?)\"", matchString)[0].replace('<br />', ',').strip(),
city=re.findall("city=\"(.*?)\"", matchString)[0].strip(),
state=re.findall("state=\"(.*?)\"", matchString)[0].strip(),
postcode=re.findall("zip=\"(.*?)\"", matchString)[0].strip(),
phone=re.findall("phone=\"(.*?)\"", matchString)[0].replace(' ','').strip(),
)
| # -*- coding: utf-8 -*-
import scrapy
from locations.items import GeojsonPointItem
class LaSalsaSpider(scrapy.Spider):
name = "la_salsa"
allowed_domains = ["www.lasalsa.com"]
start_urls = (
'http://lasalsa.com/wp-content/themes/lasalsa-main/locations-search.php?lat=0&lng=0&radius=99999999',
)
def parse(self, response):
for match in response.xpath("//markers/marker"):
yield GeojsonPointItem(
ref=match.xpath('.//@name').extract_first(),
lat=float(match.xpath('.//@latitude').extract_first()),
lon=float(match.xpath('.//@longitude').extract_first()),
addr_full=match.xpath('.//@address').extract_first(),
city=match.xpath('.//@city').extract_first(),
state=match.xpath('.//@state').extract_first(),
postcode=match.xpath('.//@zip').extract_first(),
phone=match.xpath('.//@phone').extract_first(),
)
| Use xpath instead of regex | Use xpath instead of regex
| Python | mit | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | ---
+++
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
import scrapy
+
from locations.items import GeojsonPointItem
-import json
-import re
+
class LaSalsaSpider(scrapy.Spider):
name = "la_salsa"
@@ -12,25 +12,14 @@
)
def parse(self, response):
- restaurantData = response.xpath("//markers").extract_first()
- matches = re.finditer("<marker [\S\s]+?\"\/>", restaurantData)
-
-
-
-
- for match in matches:
- matchString = match.group(0)
- fullAddress=re.findall("address=\"(.*?)\"", matchString)[0].replace('<br />', ',')
- #Accounts for cases with second address line
-
+ for match in response.xpath("//markers/marker"):
yield GeojsonPointItem(
- ref=re.findall("name=\"(.*?)\"", matchString)[0].strip(),
- lat=re.findall("latitude=\"(.*?)\"", matchString)[0].strip(),
- lon=re.findall("longitude=\"(.*?)\"", matchString)[0].strip(),
- addr_full=re.findall("address=\"(.*?)\"", matchString)[0].replace('<br />', ',').strip(),
- city=re.findall("city=\"(.*?)\"", matchString)[0].strip(),
- state=re.findall("state=\"(.*?)\"", matchString)[0].strip(),
- postcode=re.findall("zip=\"(.*?)\"", matchString)[0].strip(),
- phone=re.findall("phone=\"(.*?)\"", matchString)[0].replace(' ','').strip(),
+ ref=match.xpath('.//@name').extract_first(),
+ lat=float(match.xpath('.//@latitude').extract_first()),
+ lon=float(match.xpath('.//@longitude').extract_first()),
+ addr_full=match.xpath('.//@address').extract_first(),
+ city=match.xpath('.//@city').extract_first(),
+ state=match.xpath('.//@state').extract_first(),
+ postcode=match.xpath('.//@zip').extract_first(),
+ phone=match.xpath('.//@phone').extract_first(),
)
- |
7588bab65a098cbc0b5e2ba2c1b9a45b08adfc46 | fsspec/__init__.py | fsspec/__init__.py | try:
from importlib.metadata import entry_points
except ImportError: # python < 3.8
try:
from importlib_metadata import entry_points
except ImportError:
entry_points = None
from . import caching
from ._version import get_versions
from .core import get_fs_token_paths, open, open_files, open_local
from .mapping import FSMap, get_mapper
from .registry import (
filesystem,
get_filesystem_class,
register_implementation,
registry,
)
from .spec import AbstractFileSystem
__version__ = get_versions()["version"]
del get_versions
__all__ = [
"AbstractFileSystem",
"FSMap",
"filesystem",
"register_implementation",
"get_filesystem_class",
"get_fs_token_paths",
"get_mapper",
"open",
"open_files",
"open_local",
"registry",
"caching",
]
if entry_points is not None:
try:
entry_points = entry_points()
except TypeError:
pass # importlib-metadata < 0.8
else:
for spec in entry_points.get("fsspec.specs", []):
err_msg = f"Unable to load filesystem from {spec}"
register_implementation(
spec.name, spec.value.replace(":", "."), errtxt=err_msg
)
| try:
from importlib.metadata import entry_points
except ImportError: # python < 3.8
try:
from importlib_metadata import entry_points
except ImportError:
entry_points = None
from . import caching
from ._version import get_versions
from .core import get_fs_token_paths, open, open_files, open_local
from .exceptions import FSBaseException, FSTimeoutError
from .mapping import FSMap, get_mapper
from .registry import (
filesystem,
get_filesystem_class,
register_implementation,
registry,
)
from .spec import AbstractFileSystem
__version__ = get_versions()["version"]
del get_versions
__all__ = [
"AbstractFileSystem",
"FSBaseException",
"FSTimeoutError",
"FSMap",
"filesystem",
"register_implementation",
"get_filesystem_class",
"get_fs_token_paths",
"get_mapper",
"open",
"open_files",
"open_local",
"registry",
"caching",
]
if entry_points is not None:
try:
entry_points = entry_points()
except TypeError:
pass # importlib-metadata < 0.8
else:
for spec in entry_points.get("fsspec.specs", []):
err_msg = f"Unable to load filesystem from {spec}"
register_implementation(
spec.name, spec.value.replace(":", "."), errtxt=err_msg
)
| Make the exceptions valid on fsspec module level | Make the exceptions valid on fsspec module level
| Python | bsd-3-clause | intake/filesystem_spec,fsspec/filesystem_spec,fsspec/filesystem_spec | ---
+++
@@ -10,6 +10,7 @@
from . import caching
from ._version import get_versions
from .core import get_fs_token_paths, open, open_files, open_local
+from .exceptions import FSBaseException, FSTimeoutError
from .mapping import FSMap, get_mapper
from .registry import (
filesystem,
@@ -25,6 +26,8 @@
__all__ = [
"AbstractFileSystem",
+ "FSBaseException",
+ "FSTimeoutError",
"FSMap",
"filesystem",
"register_implementation", |
c126950f653169ad3d5035ea1580a7c4c7250f22 | test/test_Spectrum.py | test/test_Spectrum.py | #!/usr/bin/env python3
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
spec = Spectrum(x, y)
assert spec.flux == y
assert spec.x == x
| #!/usr/bin/env python3
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
| Fix test to pass, get class from module and add wavelength assignment | Fix test to pass, get class from module and add wavelength assignment
| Python | mit | jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload | ---
+++
@@ -14,6 +14,8 @@
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
- spec = Spectrum(x, y)
+ z = 2200*x
+ spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
- assert spec.x == x
+ assert spec.pixel == x
+ assert spec.wavelength == z |
1c01423e0cccd64ab249a9749f04cf3e155d3f53 | setup.py | setup.py | from distutils.core import setup
with open('README.md') as readme:
with open('HISTORY.md') as history:
long_description = readme.read() + '\n\n' + history.read()
try:
import pypandoc
long_description = pypandoc.convert(long_description, 'rst')
except(IOError, ImportError):
long_description = long_description
VERSION = '1.0.1'
setup(
name='argparse-autogen',
py_modules=['argparse_autogen'],
version=VERSION,
url='https://github.com/sashgorokhov/argparse-autogen',
download_url='https://github.com/sashgorokhov/argparse-autogen/archive/v%s.zip' % VERSION,
keywords=['python', 'argparse', 'generate'],
classifiers=[],
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description="Parser with automatic creation of parsers and subparsers for paths.",
)
| from distutils.core import setup
with open('README.md') as readme:
with open('HISTORY.md') as history:
long_description = readme.read() + '\n\n' + history.read()
try:
import pypandoc
long_description = pypandoc.convert(long_description, 'rst', 'markdown')
except(IOError, ImportError):
long_description = long_description
VERSION = '1.0.1'
setup(
name='argparse-autogen',
py_modules=['argparse_autogen'],
version=VERSION,
url='https://github.com/sashgorokhov/argparse-autogen',
download_url='https://github.com/sashgorokhov/argparse-autogen/archive/v%s.zip' % VERSION,
keywords=['python', 'argparse', 'generate'],
classifiers=[],
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description="Parser with automatic creation of parsers and subparsers for paths.",
)
| Convert md to rst readme specially for PyPi | Convert md to rst readme specially for PyPi
| Python | mit | sashgorokhov/argparse-autogen | ---
+++
@@ -7,7 +7,7 @@
try:
import pypandoc
- long_description = pypandoc.convert(long_description, 'rst')
+ long_description = pypandoc.convert(long_description, 'rst', 'markdown')
except(IOError, ImportError):
long_description = long_description
|
7844aa93d4f6836fbb8bba3a0af6b2e0e17c6fde | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-oscar-fancypages',
version=":versiontools:fancypages:",
url='https://github.com/tangentlabs/django-oscar-fancypages',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="Adding fancy CMS-style pages to Oscar",
long_description=open('README.rst').read(),
keywords="django, oscar, e-commerce, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'versiontools>=1.9.1',
'Django>=1.4.1',
#'django-oscar>=0.3.3',
'django-model-utils>=1.1.0',
'django-compressor>=1.2',
],
dependency_links=[
'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar'
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
#'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-oscar-fancypages',
version=":versiontools:fancypages:",
url='https://github.com/tangentlabs/django-oscar-fancypages',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="Adding fancy CMS-style pages to Oscar",
long_description=open('README.rst').read(),
keywords="django, oscar, e-commerce, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'versiontools>=1.9.1',
'Django>=1.4.1',
'django-oscar>=0.4',
'django-model-utils>=1.1.0',
'django-compressor>=1.2',
'virtual-node>=0.0.1',
'virtual-less>=0.0.1',
],
dependency_links=[
'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4'
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
#'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
| Update dependencies for node and less | Update dependencies for node and less
| Python | bsd-3-clause | tangentlabs/django-oscar-fancypages,tangentlabs/django-oscar-fancypages | ---
+++
@@ -17,12 +17,14 @@
install_requires=[
'versiontools>=1.9.1',
'Django>=1.4.1',
- #'django-oscar>=0.3.3',
+ 'django-oscar>=0.4',
'django-model-utils>=1.1.0',
'django-compressor>=1.2',
+ 'virtual-node>=0.0.1',
+ 'virtual-less>=0.0.1',
],
dependency_links=[
- 'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar'
+ 'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4'
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[ |
3fd264c4927a11bb3915dee38a17b169ea801c63 | setup.py | setup.py | #!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
import os.path
tests_require = [
'redis',
'unittest2',
]
setup(
name='Mule',
version='1.0',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/mule',
description = 'Distributed Testing',
packages=find_packages(),
zip_safe=False,
install_requires=[
'celery',
'uuid',
],
dependency_links=[],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='mule.runtests.runtests',
include_package_data=True,
entry_points = {
'console_scripts': [
'mule = mule.scripts.runner:main',
],
},
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
| #!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
import os.path
tests_require = [
'redis',
'unittest2',
]
setup(
name='Mule',
version='0.1.0',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/mule',
description = 'Distributed Testing',
packages=find_packages(),
zip_safe=False,
install_requires=[
'celery',
'uuid',
],
dependency_links=[],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='mule.runtests.runtests',
include_package_data=True,
entry_points = {
'console_scripts': [
'mule = mule.scripts.runner:main',
],
},
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
| Drop verison to 0.1.0 for internal release | Drop verison to 0.1.0 for internal release
| Python | apache-2.0 | disqus/mule | ---
+++
@@ -15,7 +15,7 @@
]
setup(
name='Mule',
- version='1.0',
+ version='0.1.0',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/mule', |
08335e060311994a897b95302fc54a0a2b196614 | mdx_linkify/__init__.py | mdx_linkify/__init__.py | from mdx_linkify.mdx_linkify import makeExtension
| import sys
is_python3 = sys.version_info >= (3, 0)
if is_python3:
from mdx_linkify.mdx_linkify import makeExtension
else:
from mdx_linkify import makeExtension
assert makeExtension # Silences pep8.
| Fix import for python2 and pypy | Fix import for python2 and pypy
| Python | mit | daGrevis/mdx_linkify | ---
+++
@@ -1 +1,12 @@
-from mdx_linkify.mdx_linkify import makeExtension
+import sys
+
+
+is_python3 = sys.version_info >= (3, 0)
+
+if is_python3:
+ from mdx_linkify.mdx_linkify import makeExtension
+else:
+ from mdx_linkify import makeExtension
+
+
+assert makeExtension # Silences pep8. |
3f00fc50b0eba9516cfc92b2448df299a68b5524 | main_test.py | main_test.py | fuckit('checktz')
import asyncio
import sys, os
import fuckit
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
def test_time():
assert checktz.GetTime("test") == "Finished with no errors!"
| import asyncio
import sys, os
import fuckit
fuckit('checktz')
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
def test_time():
assert checktz.GetTime("test") == "Finished with no errors!"
| Make the builds actually pass | Make the builds actually pass | Python | epl-1.0 | Bentechy66/Interlaced-Minds-Bot | ---
+++
@@ -1,7 +1,7 @@
-fuckit('checktz')
import asyncio
import sys, os
import fuckit
+fuckit('checktz')
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../')
|
ca917fa28c5bf8fe3c431868951f429c48b58e0a | buysafe/urls.py | buysafe/urls.py | from django.conf.urls import patterns, url
urlpatterns = patterns(
'buysafe.views',
url(r'^entry/(?P<order_id>\d+)/$', 'entry', name='buysafe_pay'),
(r'^start/$', 'start'),
(r'^success/(?P<payment_type>[01])/$', 'success'),
(r'^fail/(?P<payment_type>[01])/$', 'fail'),
(r'^check/(?P<payment_type>[01])/$', 'check')
)
| from django.conf.urls import patterns, url
urlpatterns = patterns(
'buysafe.views',
url(r'^entry/(?P<order_id>\d+)/$', 'entry', name='buysafe_pay'),
url(r'^start/$', 'start', name="buysafe_start"),
(r'^success/(?P<payment_type>[01])/$', 'success'),
(r'^fail/(?P<payment_type>[01])/$', 'fail'),
(r'^check/(?P<payment_type>[01])/$', 'check')
)
| Add view label to buysafe_start | Add view label to buysafe_start
| Python | bsd-3-clause | uranusjr/django-buysafe | ---
+++
@@ -4,7 +4,7 @@
urlpatterns = patterns(
'buysafe.views',
url(r'^entry/(?P<order_id>\d+)/$', 'entry', name='buysafe_pay'),
- (r'^start/$', 'start'),
+ url(r'^start/$', 'start', name="buysafe_start"),
(r'^success/(?P<payment_type>[01])/$', 'success'),
(r'^fail/(?P<payment_type>[01])/$', 'fail'),
(r'^check/(?P<payment_type>[01])/$', 'check') |
320981553f589d801d17b000d6f74c301f552811 | survey/tests/test_default_settings.py | survey/tests/test_default_settings.py | from survey.tests import BaseTest
from django.test import override_settings
from django.conf import settings
from survey import set_default_settings
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
| from django.conf import settings
from django.test import override_settings
from survey import set_default_settings
from survey.tests import BaseTest
@override_settings()
class TestDefaultSettings(BaseTest):
def test_set_choices_separator(self):
url = "/admin/survey/survey/1/change/"
del settings.CHOICES_SEPARATOR
self.login()
set_default_settings()
try:
self.client.get(url)
except AttributeError:
self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR")
| Apply black following @gjelsas MR | Apply black following @gjelsas MR
| Python | agpl-3.0 | Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey | ---
+++
@@ -1,7 +1,8 @@
+from django.conf import settings
+from django.test import override_settings
+
+from survey import set_default_settings
from survey.tests import BaseTest
-from django.test import override_settings
-from django.conf import settings
-from survey import set_default_settings
@override_settings() |
15030074c73f41a2a298c5de77b584875b7a5441 | sympy/logic/benchmarks/run-solvers.py | sympy/logic/benchmarks/run-solvers.py | from __future__ import print_function, division
from sympy.logic.utilities import load_file
from sympy.logic import satisfiable
import time
import os
import sys
input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1])
INPUT = [5 * i for i in range(2, 16)]
ALGORITHMS = ['dpll', 'dpll2']
results = {}
for test in INPUT:
results[test] = {}
for test in INPUT:
for alg in ALGORITHMS:
file_name = "%s/input/%d.cnf" % (input_path, test)
theory = load_file(file_name)
start = time.time()
if not satisfiable(theory, algorithm=alg):
raise ValueError("Function returned false")
end = time.time()
results[test][alg] = end - start
print("Test %d in time %.2f seconds for algorithm %s." %
(test, end - start, alg))
print("problem," + ','.join(ALGORITHMS))
for test in INPUT:
line = "%d" % test
for alg in ALGORITHMS:
line += ",%f" % results[test][alg]
print(line)
| from __future__ import print_function, division
from sympy.logic.utilities import load_file
from sympy.logic import satisfiable
import time
import os
import sys
input_path = os.path.dirname(__file__)
INPUT = [5 * i for i in range(2, 16)]
ALGORITHMS = ['dpll', 'dpll2']
results = {}
if __name__ == '__main__':
for test in INPUT:
results[test] = {}
for test in INPUT:
for alg in ALGORITHMS:
file_name = os.path.join(input_path, 'input', '%s.cnf' % test)
theory = load_file(file_name)
start = time.time()
if not satisfiable(theory, algorithm=alg):
raise ValueError("Function returned false")
end = time.time()
results[test][alg] = end - start
print("Test %d in time %.2f seconds for algorithm %s." %
(test, end - start, alg))
print("problem," + ','.join(ALGORITHMS))
for test in INPUT:
line = "%d" % test
for alg in ALGORITHMS:
line += ",%f" % results[test][alg]
print(line)
| Make logic benchmarks runner more portable | Make logic benchmarks runner more portable
| Python | bsd-3-clause | abloomston/sympy,wyom/sympy,madan96/sympy,sunny94/temp,liangjiaxing/sympy,kaichogami/sympy,Davidjohnwilson/sympy,shikil/sympy,AunShiLord/sympy,shipci/sympy,sahilshekhawat/sympy,pbrady/sympy,jaimahajan1997/sympy,MechCoder/sympy,Davidjohnwilson/sympy,mcdaniel67/sympy,atreyv/sympy,debugger22/sympy,madan96/sympy,sampadsaha5/sympy,Arafatk/sympy,Arafatk/sympy,shikil/sympy,skirpichev/omg,Curious72/sympy,skidzo/sympy,souravsingh/sympy,ga7g08/sympy,toolforger/sympy,atreyv/sympy,iamutkarshtiwari/sympy,souravsingh/sympy,mcdaniel67/sympy,hargup/sympy,cswiercz/sympy,chaffra/sympy,farhaanbukhsh/sympy,Sumith1896/sympy,chaffra/sympy,Titan-C/sympy,rahuldan/sympy,vipulroxx/sympy,atsao72/sympy,jbbskinny/sympy,sahilshekhawat/sympy,moble/sympy,grevutiu-gabriel/sympy,Shaswat27/sympy,meghana1995/sympy,iamutkarshtiwari/sympy,pbrady/sympy,Designist/sympy,diofant/diofant,sahmed95/sympy,MechCoder/sympy,Mitchkoens/sympy,saurabhjn76/sympy,sahilshekhawat/sympy,mcdaniel67/sympy,sahmed95/sympy,kevalds51/sympy,jaimahajan1997/sympy,kaichogami/sympy,kaushik94/sympy,rahuldan/sympy,yukoba/sympy,rahuldan/sympy,maniteja123/sympy,farhaanbukhsh/sympy,oliverlee/sympy,jerli/sympy,ga7g08/sympy,hargup/sympy,Designist/sympy,jaimahajan1997/sympy,kaichogami/sympy,Designist/sympy,skidzo/sympy,sahmed95/sympy,cswiercz/sympy,meghana1995/sympy,chaffra/sympy,emon10005/sympy,Mitchkoens/sympy,oliverlee/sympy,emon10005/sympy,iamutkarshtiwari/sympy,drufat/sympy,jerli/sympy,Vishluck/sympy,VaibhavAgarwalVA/sympy,bukzor/sympy,kumarkrishna/sympy,vipulroxx/sympy,shipci/sympy,maniteja123/sympy,wanglongqi/sympy,jamesblunt/sympy,pandeyadarsh/sympy,kumarkrishna/sympy,debugger22/sympy,kaushik94/sympy,yukoba/sympy,pandeyadarsh/sympy,Gadal/sympy,kevalds51/sympy,ahhda/sympy,yukoba/sympy,ahhda/sympy,postvakje/sympy,Mitchkoens/sympy,yashsharan/sympy,toolforger/sympy,jamesblunt/sympy,debugger22/sympy,ahhda/sympy,abloomston/sympy,Vishluck/sympy,wanglongqi/sympy,maniteja123/sympy,sunny94/temp,moble/sympy,Vishluck/s
ympy,saurabhjn76/sympy,aktech/sympy,kaushik94/sympy,abloomston/sympy,asm666/sympy,jamesblunt/sympy,ChristinaZografou/sympy,wyom/sympy,shikil/sympy,grevutiu-gabriel/sympy,bukzor/sympy,skidzo/sympy,jbbskinny/sympy,AkademieOlympia/sympy,aktech/sympy,AunShiLord/sympy,atreyv/sympy,garvitr/sympy,drufat/sympy,Titan-C/sympy,VaibhavAgarwalVA/sympy,atsao72/sympy,mafiya69/sympy,atsao72/sympy,pbrady/sympy,asm666/sympy,wanglongqi/sympy,madan96/sympy,Arafatk/sympy,ChristinaZografou/sympy,saurabhjn76/sympy,garvitr/sympy,jbbskinny/sympy,postvakje/sympy,MechCoder/sympy,drufat/sympy,beni55/sympy,lindsayad/sympy,yashsharan/sympy,toolforger/sympy,abhiii5459/sympy,garvitr/sympy,sunny94/temp,farhaanbukhsh/sympy,kumarkrishna/sympy,abhiii5459/sympy,Curious72/sympy,meghana1995/sympy,grevutiu-gabriel/sympy,AunShiLord/sympy,Titan-C/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,lindsayad/sympy,shipci/sympy,beni55/sympy,Sumith1896/sympy,Davidjohnwilson/sympy,liangjiaxing/sympy,Sumith1896/sympy,wyom/sympy,emon10005/sympy,ga7g08/sympy,AkademieOlympia/sympy,asm666/sympy,souravsingh/sympy,Shaswat27/sympy,postvakje/sympy,sampadsaha5/sympy,vipulroxx/sympy,pandeyadarsh/sympy,abhiii5459/sympy,jerli/sympy,sampadsaha5/sympy,aktech/sympy,Gadal/sympy,mafiya69/sympy,Curious72/sympy,beni55/sympy,hargup/sympy,ChristinaZografou/sympy,cswiercz/sympy,oliverlee/sympy,yashsharan/sympy,kevalds51/sympy,Shaswat27/sympy,lindsayad/sympy,bukzor/sympy,Gadal/sympy,moble/sympy,mafiya69/sympy,liangjiaxing/sympy | ---
+++
@@ -6,31 +6,32 @@
import os
import sys
-input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1])
+input_path = os.path.dirname(__file__)
INPUT = [5 * i for i in range(2, 16)]
ALGORITHMS = ['dpll', 'dpll2']
results = {}
-for test in INPUT:
- results[test] = {}
+if __name__ == '__main__':
+ for test in INPUT:
+ results[test] = {}
-for test in INPUT:
- for alg in ALGORITHMS:
- file_name = "%s/input/%d.cnf" % (input_path, test)
- theory = load_file(file_name)
- start = time.time()
- if not satisfiable(theory, algorithm=alg):
- raise ValueError("Function returned false")
- end = time.time()
- results[test][alg] = end - start
- print("Test %d in time %.2f seconds for algorithm %s." %
- (test, end - start, alg))
+ for test in INPUT:
+ for alg in ALGORITHMS:
+ file_name = os.path.join(input_path, 'input', '%s.cnf' % test)
+ theory = load_file(file_name)
+ start = time.time()
+ if not satisfiable(theory, algorithm=alg):
+ raise ValueError("Function returned false")
+ end = time.time()
+ results[test][alg] = end - start
+ print("Test %d in time %.2f seconds for algorithm %s." %
+ (test, end - start, alg))
-print("problem," + ','.join(ALGORITHMS))
+ print("problem," + ','.join(ALGORITHMS))
-for test in INPUT:
- line = "%d" % test
- for alg in ALGORITHMS:
- line += ",%f" % results[test][alg]
- print(line)
+ for test in INPUT:
+ line = "%d" % test
+ for alg in ALGORITHMS:
+ line += ",%f" % results[test][alg]
+ print(line) |
adfcd15e8c9f3c4b08bdb358d041401bf77d2a25 | calicoctl/calico_ctl/__init__.py | calicoctl/calico_ctl/__init__.py | __version__ = "0.13.0-dev"
__kubernetes_plugin_version__ = "v0.6.0"
__rkt_plugin_version__ = "v0.1.0"
__libnetwork_plugin_version__ = "v0.6.0"
__libcalico_version__ = "v0.6.0"
__felix_version__ = "1.2.1"
| __version__ = "0.13.0-dev"
__kubernetes_plugin_version__ = "v0.7.0"
__rkt_plugin_version__ = "v0.1.0"
__libnetwork_plugin_version__ = "v0.7.0"
__libcalico_version__ = "v0.7.0"
__felix_version__ = "1.3.0-pre5"
| Fix release numbers to be latest values | Fix release numbers to be latest values
| Python | apache-2.0 | caseydavenport/calico-containers,Metaswitch/calico-docker,caseydavenport/calico-containers,projectcalico/calico-containers,quater/calico-containers,Metaswitch/calico-docker,insequent/calico-docker,projectcalico/calico-containers,TrimBiggs/calico-containers,tomdee/calico-docker,TrimBiggs/calico-docker,caseydavenport/calico-docker,caseydavenport/calico-docker,tomdee/calico-containers,TrimBiggs/calico-docker,caseydavenport/calico-containers,tomdee/calico-docker,tomdee/calico-containers,projectcalico/calico-docker,quater/calico-containers,TrimBiggs/calico-containers,projectcalico/calico-docker,projectcalico/calico-containers,insequent/calico-docker | ---
+++
@@ -1,6 +1,6 @@
__version__ = "0.13.0-dev"
-__kubernetes_plugin_version__ = "v0.6.0"
+__kubernetes_plugin_version__ = "v0.7.0"
__rkt_plugin_version__ = "v0.1.0"
-__libnetwork_plugin_version__ = "v0.6.0"
-__libcalico_version__ = "v0.6.0"
-__felix_version__ = "1.2.1"
+__libnetwork_plugin_version__ = "v0.7.0"
+__libcalico_version__ = "v0.7.0"
+__felix_version__ = "1.3.0-pre5" |
2060a89cc008e3fc19b90b2278001350ef6b49ad | stellar/data/stellargraph.py | stellar/data/stellargraph.py | # -*- coding: utf-8 -*-
#
# Copyright 2017-2018 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import networkx as nx
class StellarGraph(nx.MultiDiGraph):
"""
Our own StellarGraph class, inherited from nx.MultiDiGraph, with extra stuff to be added that's needed by samplers and mappers
"""
pass
| # -*- coding: utf-8 -*-
#
# Copyright 2017-2018 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import networkx as nx
class StellarGraph(nx.MultiGraph):
"""
Our own class for heterogeneous undirected graphs, inherited from nx.MultiGraph, with extra stuff to be added that's needed by samplers and mappers
"""
def __init__(self):
super().__init__()
class StellarDiGraph(nx.MultiDiGraph):
"""
Our own class for heterogeneous directed graphs, inherited from nx.MultiDiGraph, with extra stuff to be added that's needed by samplers and mappers
"""
def __init__(self):
super().__init__()
| Split StellarGraph class into two: undirected and directed graph classes | Split StellarGraph class into two: undirected and directed graph classes
| Python | apache-2.0 | stellargraph/stellargraph,stellargraph/stellargraph | ---
+++
@@ -17,9 +17,21 @@
import networkx as nx
-class StellarGraph(nx.MultiDiGraph):
+class StellarGraph(nx.MultiGraph):
"""
- Our own StellarGraph class, inherited from nx.MultiDiGraph, with extra stuff to be added that's needed by samplers and mappers
+ Our own class for heterogeneous undirected graphs, inherited from nx.MultiGraph, with extra stuff to be added that's needed by samplers and mappers
"""
- pass
+ def __init__(self):
+ super().__init__()
+
+
+class StellarDiGraph(nx.MultiDiGraph):
+ """
+ Our own class for heterogeneous directed graphs, inherited from nx.MultiDiGraph, with extra stuff to be added that's needed by samplers and mappers
+ """
+
+ def __init__(self):
+ super().__init__()
+
+ |
0f9d3b0ed9efc72b8b3fd4d466caa4517691546c | strategies/alexStrategies.py | strategies/alexStrategies.py | class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
| class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
class CardCounter:
"""This strategy folds based on card counting expectation values."""
def __init__(self, scared=0.23):
from collections import Counter
self.Counter = Counter
self.scared = scared
def play(self, info):
c = self.Counter(info.deck)
if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
| Add a simple card counting strategy | Add a simple card counting strategy
| Python | mit | AlexMooney/pairsTournament | ---
+++
@@ -17,3 +17,16 @@
return 'Hit me'
else:
return 'fold'
+
+class CardCounter:
+ """This strategy folds based on card counting expectation values."""
+ def __init__(self, scared=0.23):
+ from collections import Counter
+ self.Counter = Counter
+ self.scared = scared
+ def play(self, info):
+ c = self.Counter(info.deck)
+ if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
+ return 'Hit me'
+ else:
+ return 'fold' |
619462203f3369b807e14e4715f992c40224b37a | account_fiscal_position_no_source_tax/account.py | account_fiscal_position_no_source_tax/account.py | from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| from openerp import models, api, fields
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
@api.v7
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
print 'fposition_id', fposition_id
if fposition_id:
taxes_without_src_ids = [
x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa
def map_tax(self, taxes):
result = super(account_fiscal_position, self).map_tax(taxes)
taxes_without_src_ids = [
x.tax_dest_id.id for x in self.tax_ids if not x.tax_src_id]
result += result.browse(taxes_without_src_ids)
return result
class account_fiscal_position_tax(models.Model):
_inherit = 'account.fiscal.position.tax'
tax_src_id = fields.Many2one(required=False)
| FIX fiscal position no source tax | FIX fiscal position no source tax
| Python | agpl-3.0 | csrocha/account_check,csrocha/account_check | ---
+++
@@ -9,9 +9,11 @@
def map_tax(self, cr, uid, fposition_id, taxes, context=None):
result = super(account_fiscal_position, self).map_tax(
cr, uid, fposition_id, taxes, context=context)
- taxes_without_src_ids = [
- x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
- result = set(result) | set(taxes_without_src_ids)
+ print 'fposition_id', fposition_id
+ if fposition_id:
+ taxes_without_src_ids = [
+ x.tax_dest_id.id for x in fposition_id.tax_ids if not x.tax_src_id]
+ result = set(result) | set(taxes_without_src_ids)
return list(result)
@api.v8 # noqa |
5c341fc463840bc2e237e1529a43aa5915a70c77 | luhn/luhn.py | luhn/luhn.py | # File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
def checksum(card_number):
return luhn(card_number)
| # File: luhn.py
# Purpose: Write a program that can take a number and determine whether
# or not it is valid per the Luhn formula.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Sunday 18 September 2016, 09:55 PM
def Luhn(card_number):
def digits_of(n):
return [int(d) for d in str(n)]
digits = digits_of(card_number)
odd = digits[-1::-2]
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
for d in even_digits:
checksum += sum(digits_of(d*2))
return checksum % 10
def checksum(card_number):
return luhn(card_number)
| Return the remainder of checksum | Return the remainder of checksum
| Python | mit | amalshehu/exercism-python | ---
+++
@@ -14,5 +14,8 @@
even = digits[-2::-2]
checksum = 0
checksum += sum(odd_digits)
+ for d in even_digits:
+ checksum += sum(digits_of(d*2))
+ return checksum % 10
def checksum(card_number):
return luhn(card_number) |
7cf2d39f4822a50f4a9347ba6c82498abc8e9bb7 | index/createIndex.py | index/createIndex.py | #!/usr/bin/python
import requests
import json
import os
myElasticServerIp = os.environ.get('ES_SERVER_IP', 'localhost')
myIndexName = os.environ.get('ES_INDEX_NAME', 'bestbuy-products')
def main():
deleteIndex()
createIndex()
def createIndex():
with open('mapping.json') as mappingFile:
jsonData = json.load(mappingFile)
type = 'product'
postUrl = 'http://' + myElasticServerIp + ':9200/' + myIndexName + '/'+type+'/'
print(postUrl)
response = requests.post(postUrl, data=json.dumps(jsonData))
print(response)
print(response.content)
def deleteIndex():
type = 'product'
response = requests.delete('http://' + myElasticServerIp + ':9200/' + myIndexName + '/')
print(response)
print(response.content)
main()
| #!/usr/bin/python
import requests
import json
import os
myElasticServerIp = os.environ.get('ES_SERVER_IP', 'localhost')
myIndexName = os.environ.get('ES_INDEX_NAME', 'bestbuy-products')
def main():
deleteIndex()
createIndex()
def createIndex():
with open('mapping.json') as mappingFile:
jsonData = json.load(mappingFile)
type = 'product'
postUrl = 'http://' + myElasticServerIp + ':9200/' + myIndexName + '/'
print(postUrl)
response = requests.post(postUrl, data=json.dumps(jsonData))
print(response)
print(response.content)
def deleteIndex():
type = 'product'
response = requests.delete('http://' + myElasticServerIp + ':9200/' + myIndexName + '/')
print(response)
print(response.content)
main()
| Remove type when creating index | Remove type when creating index | Python | mit | zpurcey/bestbuy-demo,zpurcey/bestbuy-demo,zpurcey/bestbuy-demo | ---
+++
@@ -14,7 +14,7 @@
with open('mapping.json') as mappingFile:
jsonData = json.load(mappingFile)
type = 'product'
- postUrl = 'http://' + myElasticServerIp + ':9200/' + myIndexName + '/'+type+'/'
+ postUrl = 'http://' + myElasticServerIp + ':9200/' + myIndexName + '/'
print(postUrl)
response = requests.post(postUrl, data=json.dumps(jsonData))
print(response) |
c4d2e4c4f49db961dae59780fa8f5ec351a11353 | projects/models.py | projects/models.py | # -*- encoding:utf-8 -*-
from django.db import models
class Project(models.Model):
STATUS = (
('unrevised', u'Неразгледан'),
('returned', u'Върнат за корекция'),
('pending', u'Предстои да бъде разгледан на СИС'),
('approved', u'Разгледан и одобрен на СИС'),
('rejected', u'Разгледан и неодобрен на СИС'))
user = models.ForeignKey('members.User')
name = models.CharField(max_length=100)
flp = models.ForeignKey('members.User', related_name='flp')
team = models.ManyToManyField('members.User', related_name='team')
description = models.TextField()
targets = models.TextField()
tasks = models.TextField()
target_group = models.TextField()
schedule = models.TextField()
resources = models.TextField()
finance_description = models.TextField()
partners = models.TextField(blank=True, null=True)
files = models.ManyToManyField('attachments.Attachment')
status = models.CharField(max_length=50,
choices=STATUS,
default='unrevised')
discussed_at = models.DateField(blank=True, null=True)
def __unicode__(self):
return self.name | # -*- encoding:utf-8 -*-
from django.db import models
class Project(models.Model):
STATUS = (
('unrevised', u'Неразгледан'),
('returned', u'Върнат за корекция'),
('pending', u'Предстои да бъде разгледан на СИС'),
('approved', u'Разгледан и одобрен на СИС'),
('rejected', u'Разгледан и неодобрен на СИС'))
user = models.ForeignKey('members.User')
name = models.CharField(max_length=100)
flp = models.ForeignKey('members.User', related_name='flp')
team = models.ManyToManyField('members.User', related_name='team')
description = models.TextField()
targets = models.TextField()
tasks = models.TextField()
target_group = models.TextField()
schedule = models.TextField()
resources = models.TextField()
finance_description = models.TextField()
partners = models.TextField(blank=True, null=True)
files = models.ManyToManyField('attachments.Attachment')
status = models.CharField(max_length=50,
choices=STATUS,
default='unrevised')
discussed_at = models.DateField(blank=True, null=True)
attitute = models.TextField(blank=True, null=True)
number = models.CharField(max_length=30, blank=True, null=True)
def __unicode__(self):
return self.name | Add incoming number for the project and attutude | Add incoming number for the project and attutude
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | ---
+++
@@ -27,6 +27,8 @@
choices=STATUS,
default='unrevised')
discussed_at = models.DateField(blank=True, null=True)
+ attitute = models.TextField(blank=True, null=True)
+ number = models.CharField(max_length=30, blank=True, null=True)
def __unicode__(self):
return self.name |
30a16da0089d0f7afa46fb129a6f426c75cbcd3b | modules/test_gitdata.py | modules/test_gitdata.py | from nose import with_setup
from nose.tools import *
import os
import sys
from gitdata import GitData
import simplejson as json
def test_fetch():
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
assert valid, "fetch_study(%s) returned valid JSON" % study_id
def test_write():
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
assert new_sha != "", "new_sha is non-empty"
def test_branch_exists():
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
assert exists == 0, "branch does not exist"
exists = gd.branch_exists("master")
assert exists, "master branch exists"
test_branch_exists()
test_fetch()
test_write()
| import unittest
import os
import sys
from gitdata import GitData
import simplejson as json
class TestGitData(unittest.TestCase):
def test_fetch(self):
gd = GitData(repo="./treenexus")
study_id = 438
study_nexson = gd.fetch_study(study_id)
valid = 1
try:
json.loads(study_nexson)
except:
valid = 0
self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
def test_write(self):
gd = GitData(repo="./treenexus")
author = "John Doe <john@doe.com>"
content = '{"foo":"bar"}'
study_id = 9999
branch = "johndoe_study_%s" % study_id
new_sha = gd.write_study(study_id,content,branch,author)
self.assertTrue( new_sha != "", "new_sha is non-empty")
def test_branch_exists(self):
gd = GitData(repo="./treenexus")
exists = gd.branch_exists("nothisdoesnotexist")
self.assertTrue( exists == 0, "branch does not exist")
exists = gd.branch_exists("master")
self.assertTrue( exists, "master branch exists")
def suite():
loader = unittest.TestLoader()
testsuite = loader.loadTestsFromTestCase(TestGitData)
return testsuite
def test_main():
testsuite = suite()
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
result = runner.run(testsuite)
if __name__ == "__main__":
test_main()
| Convert GitData tests to a unittest suite | Convert GitData tests to a unittest suite
| Python | bsd-2-clause | leto/new_opentree_api,leto/new_opentree_api | ---
+++
@@ -1,39 +1,48 @@
-from nose import with_setup
-from nose.tools import *
+import unittest
import os
import sys
from gitdata import GitData
import simplejson as json
-def test_fetch():
- gd = GitData(repo="./treenexus")
+class TestGitData(unittest.TestCase):
+ def test_fetch(self):
+ gd = GitData(repo="./treenexus")
- study_id = 438
- study_nexson = gd.fetch_study(study_id)
- valid = 1
- try:
- json.loads(study_nexson)
- except:
- valid = 0
- assert valid, "fetch_study(%s) returned valid JSON" % study_id
+ study_id = 438
+ study_nexson = gd.fetch_study(study_id)
+ valid = 1
+ try:
+ json.loads(study_nexson)
+ except:
+ valid = 0
+ self.assertTrue( valid, "fetch_study(%s) returned valid JSON" % study_id)
-def test_write():
- gd = GitData(repo="./treenexus")
- author = "John Doe <john@doe.com>"
- content = '{"foo":"bar"}'
- study_id = 999
- branch = "johndoe_study_%s" % study_id
- new_sha = gd.write_study(study_id,content,branch,author)
- assert new_sha != "", "new_sha is non-empty"
+ def test_write(self):
+ gd = GitData(repo="./treenexus")
+ author = "John Doe <john@doe.com>"
+ content = '{"foo":"bar"}'
+ study_id = 9999
+ branch = "johndoe_study_%s" % study_id
+ new_sha = gd.write_study(study_id,content,branch,author)
+ self.assertTrue( new_sha != "", "new_sha is non-empty")
-def test_branch_exists():
- gd = GitData(repo="./treenexus")
- exists = gd.branch_exists("nothisdoesnotexist")
- assert exists == 0, "branch does not exist"
+ def test_branch_exists(self):
+ gd = GitData(repo="./treenexus")
+ exists = gd.branch_exists("nothisdoesnotexist")
+ self.assertTrue( exists == 0, "branch does not exist")
- exists = gd.branch_exists("master")
- assert exists, "master branch exists"
+ exists = gd.branch_exists("master")
+ self.assertTrue( exists, "master branch exists")
-test_branch_exists()
-test_fetch()
-test_write()
+def suite():
+ loader = unittest.TestLoader()
+ testsuite = loader.loadTestsFromTestCase(TestGitData)
+ return testsuite
+
+def test_main():
+ testsuite = suite()
+ runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
+ result = runner.run(testsuite)
+
+if __name__ == "__main__":
+ test_main() |
9beb8378831f33c2256b5a7bf73f24d155122bea | nasa_data.py | nasa_data.py |
import requests
import os
def get_apod():
os.makedirs("APODs", exist_ok=True)
try:
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
image_url = apod_data["url"]
if image_url.endswith(".gif"):
return
image_data = requests.get(image_url, stream=True)
except requests.HTTPError:
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
for chunk in image_data.iter_content(100000):
imagefile.write(chunk)
return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
|
import requests
import os
def get_apod():
os.makedirs("APODs", exist_ok=True)
try:
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
image_url = apod_data["url"]
if image_url.endswith(".gif"):
raise TypeError
image_data = requests.get(image_url, stream=True)
except (requests.HTTPError or TypeError):
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile:
for chunk in image_data.iter_content(100000):
imagefile.write(chunk)
return os.path.abspath((os.path.join("APODs", os.path.basename(image_url))))
| Update 0.6.4 - Fixed exception error | Update 0.6.4
- Fixed exception error
| Python | mit | FXelix/space_facts_bot | ---
+++
@@ -10,9 +10,9 @@
apod_data = requests.get("https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY").json()
image_url = apod_data["url"]
if image_url.endswith(".gif"):
- return
+ raise TypeError
image_data = requests.get(image_url, stream=True)
- except requests.HTTPError:
+ except (requests.HTTPError or TypeError):
return
with open(os.path.join("APODs", os.path.basename(image_url)), "wb") as imagefile: |
f35494ebc7c710af45c8973eb1c2b4d31ec1c7c0 | tests/fakes.py | tests/fakes.py | class FakeHttpRequest(object):
def __init__(self, method='GET', body=''):
self.method = method.upper()
self.body = body
class FakeHttpResponse(object):
def __init__(self, body, content_type='text/html'):
self.body = body
self.content_type = content_type
self.status_code = 200
class FakeModel(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
| import six
class FakeHttpRequest(object):
def __init__(self, method='GET', body=''):
self.method = method.upper()
self.body = body
if six.PY3:
self.body = body.encode('utf-8')
class FakeHttpResponse(object):
def __init__(self, body, content_type='text/html'):
self.body = body
self.content_type = content_type
self.status_code = 200
class FakeModel(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
| Update tests to reproduce bug | Update tests to reproduce bug
| Python | bsd-3-clause | pobear/restless,viniciuscainelli/restless,toastdriven/restless,tonybajan/restless,CraveFood/restkiss,jangeador/restless | ---
+++
@@ -1,7 +1,12 @@
+import six
+
+
class FakeHttpRequest(object):
def __init__(self, method='GET', body=''):
self.method = method.upper()
self.body = body
+ if six.PY3:
+ self.body = body.encode('utf-8')
class FakeHttpResponse(object): |
ed85bde14d8c37144352d7526c674c26fa577407 | dnsimple/record.py | dnsimple/record.py | from .model import Model
class Record(Model, object):
def __init__(self, request, domain, attributes):
self.domain = domain
super(Record, self).__init__(request, attributes)
def update(self, attributes):
success = False
self.assign(attributes)
response = self.request.put(
'domains/{0}/records/{1}'.format(self.domain.name, self.id),
{'record': attributes}
)
return response.was_successful()
def delete(self):
response = self.request.delete('domains/{0}/records/{1}'.format(self.domain.name, self.id))
return response.was_successful()
| from .model import Model
class Record(Model, object):
def __init__(self, request, domain, attributes):
self.domain = domain
super(Record, self).__init__(request, attributes)
def update(self, attributes):
self.assign(attributes)
response = self.request.put(
'domains/{0}/records/{1}'.format(self.domain.name, self.id),
{'record': attributes}
)
return response.was_successful()
def delete(self):
response = self.request.delete('domains/{0}/records/{1}'.format(self.domain.name, self.id))
return response.was_successful()
| Remove unused assignment in `Record` class | Remove unused assignment in `Record` class | Python | mit | vigetlabs/dnsimple | ---
+++
@@ -8,8 +8,6 @@
super(Record, self).__init__(request, attributes)
def update(self, attributes):
- success = False
-
self.assign(attributes)
response = self.request.put( |
304713ca8731c2ef27743abb772456d55ad0f3a8 | python/ql/test/library-tests/frameworks/django-v2-v3/testapp/urls.py | python/ql/test/library-tests/frameworks/django-v2-v3/testapp/urls.py | from django.urls import path, re_path
# This version 1.x way of defining urls is deprecated in Django 3.1, but still works
from django.conf.urls import url
from . import views
urlpatterns = [
path("foo/", views.foo), # $routeSetup="foo/"
# TODO: Doesn't include standard `$` to mark end of string, due to problems with
# inline expectation tests (which thinks the `$` would mark the beginning of a new
# line)
re_path(r"^ba[rz]/", views.bar_baz), # $routeSetup="^ba[rz]/"
url(r"^deprecated/", views.deprecated), # $routeSetup="^deprecated/"
path("basic-view-handler/", views.MyBasicViewHandler.as_view()), # $routeSetup="basic-view-handler/"
path("custom-inheritance-view-handler/", views.MyViewHandlerWithCustomInheritance.as_view()), # $routeSetup="custom-inheritance-view-handler/"
path("CustomRedirectView/<foo>", views.CustomRedirectView.as_view()), # $routeSetup="CustomRedirectView/<foo>"
path("CustomRedirectView2/<foo>", views.CustomRedirectView2.as_view()), # $routeSetup="CustomRedirectView2/<foo>"
]
| from django.urls import path, re_path
from . import views
urlpatterns = [
path("foo/", views.foo), # $routeSetup="foo/"
# TODO: Doesn't include standard `$` to mark end of string, due to problems with
# inline expectation tests (which thinks the `$` would mark the beginning of a new
# line)
re_path(r"^ba[rz]/", views.bar_baz), # $routeSetup="^ba[rz]/"
path("basic-view-handler/", views.MyBasicViewHandler.as_view()), # $routeSetup="basic-view-handler/"
path("custom-inheritance-view-handler/", views.MyViewHandlerWithCustomInheritance.as_view()), # $routeSetup="custom-inheritance-view-handler/"
path("CustomRedirectView/<foo>", views.CustomRedirectView.as_view()), # $routeSetup="CustomRedirectView/<foo>"
path("CustomRedirectView2/<foo>", views.CustomRedirectView2.as_view()), # $routeSetup="CustomRedirectView2/<foo>"
]
from django import __version__ as django_version
if django_version[0] == "3":
# This version 1.x way of defining urls is deprecated in Django 3.1, but still works.
# However, it is removed in Django 4.0, so we need this guard to make our code runnable
from django.conf.urls import url
old_urlpatterns = urlpatterns
# we need this assignment to get our logic working... maybe it should be more
# sophisticated?
urlpatterns = [
url(r"^deprecated/", views.deprecated), # $routeSetup="^deprecated/"
]
urlpatterns += old_urlpatterns
| Handle django v4 as well in tests | Python: Handle django v4 as well in tests
| Python | mit | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | ---
+++
@@ -1,7 +1,4 @@
from django.urls import path, re_path
-
-# This version 1.x way of defining urls is deprecated in Django 3.1, but still works
-from django.conf.urls import url
from . import views
@@ -11,7 +8,6 @@
# inline expectation tests (which thinks the `$` would mark the beginning of a new
# line)
re_path(r"^ba[rz]/", views.bar_baz), # $routeSetup="^ba[rz]/"
- url(r"^deprecated/", views.deprecated), # $routeSetup="^deprecated/"
path("basic-view-handler/", views.MyBasicViewHandler.as_view()), # $routeSetup="basic-view-handler/"
path("custom-inheritance-view-handler/", views.MyViewHandlerWithCustomInheritance.as_view()), # $routeSetup="custom-inheritance-view-handler/"
@@ -19,3 +15,20 @@
path("CustomRedirectView/<foo>", views.CustomRedirectView.as_view()), # $routeSetup="CustomRedirectView/<foo>"
path("CustomRedirectView2/<foo>", views.CustomRedirectView2.as_view()), # $routeSetup="CustomRedirectView2/<foo>"
]
+
+from django import __version__ as django_version
+
+if django_version[0] == "3":
+ # This version 1.x way of defining urls is deprecated in Django 3.1, but still works.
+ # However, it is removed in Django 4.0, so we need this guard to make our code runnable
+ from django.conf.urls import url
+
+ old_urlpatterns = urlpatterns
+
+ # we need this assignment to get our logic working... maybe it should be more
+ # sophisticated?
+ urlpatterns = [
+ url(r"^deprecated/", views.deprecated), # $routeSetup="^deprecated/"
+ ]
+
+ urlpatterns += old_urlpatterns |
bb0728a6e73f3995968ee7c59ffcb03fae65d983 | quotes/templatetags/quote_tags.py | quotes/templatetags/quote_tags.py | from django import template
from quotes.models import Quote
register = template.Library()
@register.inclusion_tag('quotes/random_quote.html')
def show_random_quote():
"""
For generating a single random quote
"""
random_quote = Quote.objects.order_by('?')[0]
return {'random_quote': random_quote} | from django import template
from quotes.models import Quote
register = template.Library()
@register.inclusion_tag('quotes/random_quote.html')
def show_random_quote():
"""
For generating a single random quote
"""
try:
random_quote = Quote.objects.order_by('?')[0]
except ValueError:
print "There are no quotes" | Fix for allowing for empty database | Fix for allowing for empty database | Python | bsd-3-clause | davemerwin/django-quotes | ---
+++
@@ -8,5 +8,7 @@
"""
For generating a single random quote
"""
- random_quote = Quote.objects.order_by('?')[0]
- return {'random_quote': random_quote}
+ try:
+ random_quote = Quote.objects.order_by('?')[0]
+ except ValueError:
+ print "There are no quotes" |
724b80b44229b531d7a11cb7cc9f6ad88d9aedb0 | bnw_handlers/command_userinfo.py | bnw_handlers/command_userinfo.py | from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
| from twisted.internet import defer
import bnw_core.bnw_objects as objs
@defer.inlineCallbacks
def cmd_userinfo(request, user=''):
if not user:
defer.returnValue(dict(ok=False, desc='Username required.'))
user_obj = yield objs.User.find_one({'name': user})
subscribers = yield objs.Subscription.find(dict(
target=user, type='sub_user'))
subscribers = set([x['user'] for x in subscribers])
subscriptions = yield objs.Subscription.find(dict(
user=user, type='sub_user'))
subscriptions = set([x['target'] for x in subscriptions])
friends = list(subscribers & subscriptions)
friends.sort()
subscribers_only = list(subscribers - subscriptions)
subscribers_only.sort()
subscriptions_only = list(subscriptions - subscribers)
subscriptions_only.sort()
messages_count = int((yield objs.Message.count({'user': user})))
comments_count = int((yield objs.Comment.count({'user': user})))
vcard = user_obj.get('vcard', {})
about = user_obj.get('settings', {}).get('about', '')
if not about:
about = vcard.get('desc', '')
defer.returnValue({
'ok': True,
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count,
'comments_count': comments_count,
'subscribers': subscribers_only,
'subscriptions': subscriptions_only,
'friends': friends,
'vcard': vcard,
'about': about,
})
| Fix userinfo api command (send ok=True) | Fix userinfo api command (send ok=True)
| Python | bsd-2-clause | ojab/bnw,stiletto/bnw,stiletto/bnw,ojab/bnw,un-def/bnw,ojab/bnw,stiletto/bnw,ojab/bnw,un-def/bnw,un-def/bnw,un-def/bnw,stiletto/bnw | ---
+++
@@ -27,6 +27,7 @@
if not about:
about = vcard.get('desc', '')
defer.returnValue({
+ 'ok': True,
'user': user,
'regdate': user_obj.get('regdate', 0),
'messages_count': messages_count, |
3c01227bfef6e8cabdca9fe9fe620763a28bff88 | colorlog/logging.py | colorlog/logging.py | """Wrappers around the logging module."""
from __future__ import absolute_import
import functools
import logging
from colorlog.colorlog import ColoredFormatter
BASIC_FORMAT = "%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s"
def basicConfig(**kwargs):
"""Call ``logging.basicConfig`` and override the formatter it creates."""
logging.basicConfig(**kwargs)
logging._acquireLock()
try:
stream = logging.root.handlers[0]
stream.setFormatter(
ColoredFormatter(
fmt=kwargs.get('format', BASIC_FORMAT),
datefmt=kwargs.get('datefmt', None)))
finally:
logging._releaseLock()
def ensure_configured(func):
"""Modify a function to call ``basicConfig`` first if no handlers exist."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if len(logging.root.handlers) == 0:
basicConfig()
return func(*args, **kwargs)
return wrapper
root = logging.root
getLogger = logging.getLogger
debug = ensure_configured(logging.debug)
info = ensure_configured(logging.info)
warning = ensure_configured(logging.warning)
error = ensure_configured(logging.error)
critical = ensure_configured(logging.critical)
log = ensure_configured(logging.log)
exception = ensure_configured(logging.exception)
StreamHandler = logging.StreamHandler
| """Wrappers around the logging module."""
from __future__ import absolute_import
import functools
import logging
from colorlog.colorlog import ColoredFormatter
BASIC_FORMAT = "%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s"
def basicConfig(style='%', log_colors=None, reset=True, secondary_log_colors=None, **kwargs):
"""Call ``logging.basicConfig`` and override the formatter it creates."""
logging.basicConfig(**kwargs)
logging._acquireLock()
try:
stream = logging.root.handlers[0]
stream.setFormatter(
ColoredFormatter(
fmt=kwargs.get('format', BASIC_FORMAT),
datefmt=kwargs.get('datefmt', None),
style=style,
log_colors=log_colors,
reset=reset,
secondary_log_colors=secondary_log_colors
))
finally:
logging._releaseLock()
def ensure_configured(func):
"""Modify a function to call ``basicConfig`` first if no handlers exist."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if len(logging.root.handlers) == 0:
basicConfig()
return func(*args, **kwargs)
return wrapper
root = logging.root
getLogger = logging.getLogger
debug = ensure_configured(logging.debug)
info = ensure_configured(logging.info)
warning = ensure_configured(logging.warning)
error = ensure_configured(logging.error)
critical = ensure_configured(logging.critical)
log = ensure_configured(logging.log)
exception = ensure_configured(logging.exception)
StreamHandler = logging.StreamHandler
| Add extra parameters for ColoredFormatter to the basicConfig | Add extra parameters for ColoredFormatter to the basicConfig
| Python | mit | borntyping/python-colorlog | ---
+++
@@ -10,7 +10,7 @@
BASIC_FORMAT = "%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s"
-def basicConfig(**kwargs):
+def basicConfig(style='%', log_colors=None, reset=True, secondary_log_colors=None, **kwargs):
"""Call ``logging.basicConfig`` and override the formatter it creates."""
logging.basicConfig(**kwargs)
logging._acquireLock()
@@ -19,7 +19,12 @@
stream.setFormatter(
ColoredFormatter(
fmt=kwargs.get('format', BASIC_FORMAT),
- datefmt=kwargs.get('datefmt', None)))
+ datefmt=kwargs.get('datefmt', None),
+ style=style,
+ log_colors=log_colors,
+ reset=reset,
+ secondary_log_colors=secondary_log_colors
+ ))
finally:
logging._releaseLock()
|
1afebd46c8cf786673adface7724e9488df17a7e | appengine-experimental/src/models.py | appengine-experimental/src/models.py | from datetime import datetime, timedelta
from google.appengine.ext import db
class CHPIncident(db.Model):
CenterID = db.StringProperty(required=True)
DispatchID = db.StringProperty(required=True)
LogID = db.StringProperty(required=True)
LogTime = db.DateTimeProperty()
LogType = db.StringProperty()
LogTypeID = db.StringProperty()
Location = db.StringProperty()
Area = db.StringProperty()
ThomasBrothers = db.StringProperty()
TBXY = db.StringProperty()
LogDetails = db.BlobProperty()
geolocation = db.GeoPtProperty()
created = db.DateTimeProperty(auto_now_add=True)
updated = db.DateTimeProperty(auto_now=True)
modified = db.DateTimeProperty()
def getStatus(self):
if self.created > datetime.utcnow() - timedelta(minutes=5):
# less than 5 min old == new
return 'new'
elif self.updated < datetime.utcnow() - timedelta(minutes=5):
# not updated in 5 min == inactive
return 'inactive'
else:
return 'active'
| from datetime import datetime, timedelta
from google.appengine.ext import db
class CHPIncident(db.Model):
CenterID = db.StringProperty(required=True)
DispatchID = db.StringProperty(required=True)
LogID = db.StringProperty(required=True)
LogTime = db.DateTimeProperty()
LogType = db.StringProperty()
LogTypeID = db.StringProperty()
Location = db.StringProperty()
Area = db.StringProperty()
ThomasBrothers = db.StringProperty()
TBXY = db.StringProperty()
LogDetails = db.BlobProperty()
geolocation = db.GeoPtProperty()
created = db.DateTimeProperty(auto_now_add=True)
updated = db.DateTimeProperty(auto_now=True)
modified = db.DateTimeProperty()
def getStatus(self):
if self.created > datetime.utcnow() - timedelta(minutes=5):
# less than 5 min old == new
return 'new'
elif self.updated < datetime.utcnow() - timedelta(minutes=6):
# not updated in the last update == inactive
#
# Note this is the cronned update interval + 1.
return 'inactive'
else:
return 'active'
| Tweak the "inactive" timeout a bit to take latency into account. | Tweak the "inactive" timeout a bit to take latency into account.
| Python | isc | lectroidmarc/SacTraffic,lectroidmarc/SacTraffic | ---
+++
@@ -24,8 +24,10 @@
if self.created > datetime.utcnow() - timedelta(minutes=5):
# less than 5 min old == new
return 'new'
- elif self.updated < datetime.utcnow() - timedelta(minutes=5):
- # not updated in 5 min == inactive
+ elif self.updated < datetime.utcnow() - timedelta(minutes=6):
+ # not updated in the last update == inactive
+ #
+ # Note this is the cronned update interval + 1.
return 'inactive'
else:
return 'active' |
cb1de4cc77e3368ed730c14044f76f2d20c3d909 | dashi/generator.py | dashi/generator.py | import asyncio
import collections
import dashi.config
import dashi.db
import dashi.time
import datetime
import functools
import jinja2
import logging
import os
import pprint
LOGGER = logging.getLogger(__name__)
@asyncio.coroutine
def go():
config = dashi.config.parse()
template_loader = jinja2.FileSystemLoader(searchpath=config.template_path)
template_environment = jinja2.Environment(loader=template_loader)
connection = dashi.db.connection()
authors = dashi.db.get_all_authors(connection)
print(authors)
return
LOGGER.debug(repo_stats)
try:
os.mkdir(config.output_path)
except OSError:
pass
template = template_environment.get_template('index.html')
output = template.render(repo_stats=repo_stats)
path = os.path.join(config.output_path, 'index.html')
with open(path, 'w') as f:
f.write(output)
| import asyncio
import collections
import dashi.config
import dashi.db
import dashi.time
import datetime
import functools
import jinja2
import logging
import os
import pprint
LOGGER = logging.getLogger(__name__)
@asyncio.coroutine
def go():
config = dashi.config.parse()
template_loader = jinja2.FileSystemLoader(searchpath=config['paths']['template'])
template_environment = jinja2.Environment(loader=template_loader)
connection = dashi.db.connection()
authors = dashi.db.get_all_authors(connection)
return
LOGGER.debug(repo_stats)
try:
os.mkdir(config['paths']['output'])
except OSError:
pass
template = template_environment.get_template('index.html')
output = template.render(repo_stats=repo_stats)
path = os.path.join(config['paths']['output'], 'index.html')
with open(path, 'w') as f:
f.write(output)
| Update more references to the old config style | Update more references to the old config style
| Python | mit | EliRibble/dashi,EliRibble/dashi | ---
+++
@@ -15,23 +15,24 @@
@asyncio.coroutine
def go():
config = dashi.config.parse()
- template_loader = jinja2.FileSystemLoader(searchpath=config.template_path)
+ template_loader = jinja2.FileSystemLoader(searchpath=config['paths']['template'])
template_environment = jinja2.Environment(loader=template_loader)
connection = dashi.db.connection()
+
authors = dashi.db.get_all_authors(connection)
- print(authors)
- return
+ return
+
LOGGER.debug(repo_stats)
try:
- os.mkdir(config.output_path)
+ os.mkdir(config['paths']['output'])
except OSError:
pass
template = template_environment.get_template('index.html')
output = template.render(repo_stats=repo_stats)
- path = os.path.join(config.output_path, 'index.html')
+ path = os.path.join(config['paths']['output'], 'index.html')
with open(path, 'w') as f:
f.write(output)
|
8948536f0faf12d36b0da48ae5f45d00a022ebb7 | InvenTree/InvenTree/helpers.py | InvenTree/InvenTree/helpers.py | import io
from wsgiref.util import FileWrapper
from django.http import StreamingHttpResponse
def WrapWithQuotes(text):
# TODO - Make this better
if not text.startswith('"'):
text = '"' + text
if not text.endswith('"'):
text = text + '"'
return text
def DownloadFile(data, filename, content_type='application/text'):
"""
Create a dynamic file for the user to download.
@param data is the raw file data
"""
filename = WrapWithQuotes(filename)
wrapper = FileWrapper(io.StringIO(data))
response = StreamingHttpResponse(wrapper, content_type=content_type)
response['Content-Length'] = len(data)
response['Content-Disposition'] = 'attachment; filename={f}'.format(f=filename)
return response
| import io
from wsgiref.util import FileWrapper
from django.http import StreamingHttpResponse
def WrapWithQuotes(text):
# TODO - Make this better
if not text.startswith('"'):
text = '"' + text
if not text.endswith('"'):
text = text + '"'
return text
def DownloadFile(data, filename, content_type='application/text'):
"""
Create a dynamic file for the user to download.
@param data is the raw file data
"""
filename = WrapWithQuotes(filename)
if type(data) == str:
wrapper = FileWrapper(io.StringIO(data))
else:
wrapper = FileWrapper(io.BytesIO(data))
response = StreamingHttpResponse(wrapper, content_type=content_type)
response['Content-Length'] = len(data)
response['Content-Disposition'] = 'attachment; filename={f}'.format(f=filename)
return response
| Allow export of binary file data | Allow export of binary file data
- Use io.BytesIO for non-string-data file objects
| Python | mit | inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree | ---
+++
@@ -23,7 +23,10 @@
filename = WrapWithQuotes(filename)
- wrapper = FileWrapper(io.StringIO(data))
+ if type(data) == str:
+ wrapper = FileWrapper(io.StringIO(data))
+ else:
+ wrapper = FileWrapper(io.BytesIO(data))
response = StreamingHttpResponse(wrapper, content_type=content_type)
response['Content-Length'] = len(data) |
fbb532473bc6434628c6f01fabb8eae3ad60a171 | nn/linear.py | nn/linear.py | import tensorflow as tf
from . import var_init
from .util import static_shape
from .variable import variable
def linear(x, output_layer_size):
weight = variable([static_shape(x)[1], output_layer_size])
bias = variable([output_layer_size])
tf.add_to_collection(tf.GraphKeys.WEIGHTS, weight)
tf.add_to_collection(tf.GraphKeys.BIASES, bias)
return tf.matmul(x, weight) + bias
| import tensorflow as tf
from .util import static_shape
from .variable import variable
def linear(x, output_layer_size):
weight = variable([static_shape(x)[1], output_layer_size])
bias = variable([output_layer_size])
tf.add_to_collection(tf.GraphKeys.WEIGHTS, weight)
tf.add_to_collection(tf.GraphKeys.BIASES, bias)
return tf.matmul(x, weight) + bias
| Remove an extra import statement | Remove an extra import statement
| Python | unlicense | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | ---
+++
@@ -1,6 +1,5 @@
import tensorflow as tf
-from . import var_init
from .util import static_shape
from .variable import variable
|
dcecd75cae428bb27ec8759a21e52267a55f149a | django_comments/signals.py | django_comments/signals.py | """
Signals relating to comments.
"""
from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
comment_will_be_posted = Signal(providing_args=["comment", "request"])
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
comment_was_posted = Signal(providing_args=["comment", "request"])
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
comment_was_flagged = Signal(providing_args=["comment", "flag", "created", "request"])
| """
Signals relating to comments.
"""
from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated; this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False the comment will be
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
# Arguments: "comment", "request"
comment_will_be_posted = Signal()
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
# Arguments: "comment", "request"
comment_was_posted = Signal()
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
# Arguments: "comment", "flag", "created", "request"
comment_was_flagged = Signal()
| Remove Signal(providing_args) argument b/c it is deprecated | Remove Signal(providing_args) argument b/c it is deprecated
RemovedInDjango40Warning: The providing_args argument is deprecated.
As it is purely documentational, it has no replacement. If you rely
on this argument as documentation, you can move the text to a code
comment or docstring.
| Python | bsd-3-clause | django/django-contrib-comments,django/django-contrib-comments | ---
+++
@@ -9,13 +9,16 @@
# discarded and a 400 response. This signal is sent at more or less
# the same time (just before, actually) as the Comment object's pre-save signal,
# except that the HTTP request is sent along with this signal.
-comment_will_be_posted = Signal(providing_args=["comment", "request"])
+# Arguments: "comment", "request"
+comment_will_be_posted = Signal()
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
-comment_was_posted = Signal(providing_args=["comment", "request"])
+# Arguments: "comment", "request"
+comment_was_posted = Signal()
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
-comment_was_flagged = Signal(providing_args=["comment", "flag", "created", "request"])
+# Arguments: "comment", "flag", "created", "request"
+comment_was_flagged = Signal() |
75dca1c2d39126556ded5f328461986a9eabb230 | django_webtest/backends.py | django_webtest/backends.py | from __future__ import absolute_import
from django.contrib.auth.backends import RemoteUserBackend
from .compat import from_wsgi_safe_string
class WebtestUserBackend(RemoteUserBackend):
""" Auth backend for django-webtest auth system """
def authenticate(self, request, django_webtest_user):
return super(WebtestUserBackend, self).authenticate(
request, django_webtest_user)
def clean_username(self, username):
return from_wsgi_safe_string(username)
| from __future__ import absolute_import
from django.utils.version import get_complete_version
from django.contrib.auth.backends import RemoteUserBackend
from .compat import from_wsgi_safe_string
class WebtestUserBackend(RemoteUserBackend):
""" Auth backend for django-webtest auth system """
if get_complete_version() >= (1, 11):
def authenticate(self, request, django_webtest_user):
return super(WebtestUserBackend, self).authenticate(
request, django_webtest_user)
else:
def authenticate(self, django_webtest_user):
return super(WebtestUserBackend, self).authenticate(
django_webtest_user)
def clean_username(self, username):
return from_wsgi_safe_string(username)
| Define WebtestUserBackend.authenticate based on Django version | Define WebtestUserBackend.authenticate based on Django version
This is required because the signature of RemoteUserBackend.authenticate
has changed in Django 1.11.
| Python | mit | django-webtest/django-webtest,kmike/django-webtest,django-webtest/django-webtest,kmike/django-webtest | ---
+++
@@ -1,14 +1,19 @@
from __future__ import absolute_import
+from django.utils.version import get_complete_version
from django.contrib.auth.backends import RemoteUserBackend
from .compat import from_wsgi_safe_string
-
class WebtestUserBackend(RemoteUserBackend):
""" Auth backend for django-webtest auth system """
- def authenticate(self, request, django_webtest_user):
- return super(WebtestUserBackend, self).authenticate(
- request, django_webtest_user)
+ if get_complete_version() >= (1, 11):
+ def authenticate(self, request, django_webtest_user):
+ return super(WebtestUserBackend, self).authenticate(
+ request, django_webtest_user)
+ else:
+ def authenticate(self, django_webtest_user):
+ return super(WebtestUserBackend, self).authenticate(
+ django_webtest_user)
def clean_username(self, username):
return from_wsgi_safe_string(username) |
03d628abc4711bb0de4a7a0ef13cc4c0ecb92032 | opps/articles/tests/models.py | opps/articles/tests/models.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Post
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Post
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
def test_child_class(self):
post = Post.objects.get(id=1)
self.assertTrue(post.child_class)
self.assertEqual(post.child_class, 'Post')
| Add test articles (post), check child_class | Add test articles (post), check child_class
| Python | mit | YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps | ---
+++
@@ -17,3 +17,9 @@
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
+
+ def test_child_class(self):
+ post = Post.objects.get(id=1)
+
+ self.assertTrue(post.child_class)
+ self.assertEqual(post.child_class, 'Post') |
dc1b2ce87bdd5aa3b891d43f5d8b5c465dc909d1 | project_user_story/project.py | project_user_story/project.py | # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
class task(models.Model):
_inherit = 'project.task'
user_story = fields.Boolean(
'Is User Story?',
default=False)
@api.multi
def action_open_task(self):
return {
'name': _('User Story'),
'view_type': 'form',
'view_mode': 'form',
# 'view_id': [res_id],
'res_model': 'project.task',
# 'context': "{'type':'out_invoice'}",
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': self.id,
}
class project(models.Model):
_inherit = 'project.project'
user_story_ids = fields.One2many(
'project.task',
'project_id',
domain=[('user_story', '=', True)],
context={'default_user_story': True},
string='User Stories',
)
| # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
class task(models.Model):
_inherit = 'project.task'
user_story = fields.Boolean(
'Is User Story?',
default=False)
@api.multi
def action_open_task(self):
print 'context', self._context
return {
'name': _('User Story'),
'view_type': 'form',
'view_mode': 'form',
# 'view_id': [res_id],
'res_model': 'project.task',
'context': self._context,
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': self.id,
}
class project(models.Model):
_inherit = 'project.project'
user_story_ids = fields.One2many(
'project.task',
'project_id',
domain=[('user_story', '=', True)],
context={'default_user_story': True},
string='User Stories',
)
| FIX don loose context when creating from story form view | FIX don loose context when creating from story form view
| Python | agpl-3.0 | levkar/odoo-addons,ingadhoc/partner,ingadhoc/odoo-addons,ingadhoc/stock,dvitme/odoo-addons,ingadhoc/product,ingadhoc/account-analytic,ingadhoc/product,adhoc-dev/odoo-addons,sysadminmatmoz/ingadhoc,maljac/odoo-addons,levkar/odoo-addons,maljac/odoo-addons,ClearCorp/account-financial-tools,sysadminmatmoz/ingadhoc,adhoc-dev/odoo-addons,adhoc-dev/account-financial-tools,ingadhoc/account-invoicing,levkar/odoo-addons,bmya/odoo-addons,adhoc-dev/odoo-addons,HBEE/odoo-addons,HBEE/odoo-addons,ClearCorp/account-financial-tools,levkar/odoo-addons,HBEE/odoo-addons,syci/ingadhoc-odoo-addons,jorsea/odoo-addons,jorsea/odoo-addons,bmya/odoo-addons,ingadhoc/sale,sysadminmatmoz/ingadhoc,ingadhoc/account-payment,ingadhoc/account-financial-tools,ingadhoc/sale,ingadhoc/odoo-addons,maljac/odoo-addons,ingadhoc/sale,adhoc-dev/account-financial-tools,bmya/odoo-addons,dvitme/odoo-addons,dvitme/odoo-addons,ingadhoc/sale,jorsea/odoo-addons,syci/ingadhoc-odoo-addons,ingadhoc/odoo-addons,syci/ingadhoc-odoo-addons | ---
+++
@@ -11,13 +11,14 @@
@api.multi
def action_open_task(self):
+ print 'context', self._context
return {
'name': _('User Story'),
'view_type': 'form',
'view_mode': 'form',
# 'view_id': [res_id],
'res_model': 'project.task',
- # 'context': "{'type':'out_invoice'}",
+ 'context': self._context,
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current', |
eefc9f9757c0cf21c2ca06a791d03c4dea608fd2 | tools/update_chipmunk_src.py | tools/update_chipmunk_src.py |
import sys, os.path
import subprocess
import shutil
pymunk_src_path = os.path.join("..", "chipmunk_src")
shutil.rmtree(os.path.join(pymunk_src_path, "src"), True)
shutil.rmtree(os.path.join(pymunk_src_path, "include"), True)
if len(sys.argv) > 1:
chipmunk_git_path = sys.argv[1]
else:
chipmunk_git_path = raw_input("Enter path to chipmunk source")
shutil.copytree(os.path.join(chipmunk_git_path,"src"), os.path.join(pymunk_src_path,"src"))
shutil.copytree(os.path.join(chipmunk_git_path,"include"), os.path.join(pymunk_src_path,"include"))
subprocess.call("git rev-parse HEAD", shell=True)
print("Remember to update git version string of chipmunk!")
|
import sys, os.path
import subprocess
import shutil
pymunk_src_path = os.path.join("..", "chipmunk_src")
shutil.rmtree(os.path.join(pymunk_src_path, "src"), True)
shutil.rmtree(os.path.join(pymunk_src_path, "include"), True)
if len(sys.argv) > 1:
chipmunk_git_path = sys.argv[1]
else:
chipmunk_git_path = input("Enter path to chipmunk source")
shutil.copytree(os.path.join(chipmunk_git_path,"src"), os.path.join(pymunk_src_path,"src"))
shutil.copytree(os.path.join(chipmunk_git_path,"include"), os.path.join(pymunk_src_path,"include"))
subprocess.call("git rev-parse HEAD", shell=True)
print("Remember to update git version string of chipmunk!")
| Fix update chipmunk src script to run on python 3 | Fix update chipmunk src script to run on python 3
| Python | mit | viblo/pymunk,viblo/pymunk | ---
+++
@@ -11,7 +11,7 @@
if len(sys.argv) > 1:
chipmunk_git_path = sys.argv[1]
else:
- chipmunk_git_path = raw_input("Enter path to chipmunk source")
+ chipmunk_git_path = input("Enter path to chipmunk source")
shutil.copytree(os.path.join(chipmunk_git_path,"src"), os.path.join(pymunk_src_path,"src"))
shutil.copytree(os.path.join(chipmunk_git_path,"include"), os.path.join(pymunk_src_path,"include")) |
f845fcfc145edd2ef55df3275971f5c940a61bb4 | tests/list_match.py | tests/list_match.py | from bedrock import *
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = Cons(1, Cons(2, Cons(3, Nil)))
b = match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
("_", lambda: 4))
assert b == 2, "List pattern match"
return 0
| from bedrock import *
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = hint(Cons(1, Cons(2, Cons(3, Nil))), a='int')
#b = hint(match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
# ("_", lambda: 4)), a='int')
#assert b == 2, "List pattern match"
return 0
| Disable match() test for now | Disable match() test for now
| Python | mit | pshc/archipelago,pshc/archipelago,pshc/archipelago | ---
+++
@@ -3,8 +3,8 @@
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
- a = Cons(1, Cons(2, Cons(3, Nil)))
- b = match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
- ("_", lambda: 4))
- assert b == 2, "List pattern match"
+ a = hint(Cons(1, Cons(2, Cons(3, Nil))), a='int')
+ #b = hint(match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
+ # ("_", lambda: 4)), a='int')
+ #assert b == 2, "List pattern match"
return 0 |
6233207fd57d499bc2bcd313a5b6d829ed712eab | tests/test_check.py | tests/test_check.py | # -*- coding: utf-8 -*-
from unittest.mock import patch
import semver
from chaostoolkit import __version__
from chaostoolkit.check import check_newer_version
class FakeResponse:
def __init__(self, status=200, url=None, response=None):
self.status_code = status
self.url = url
self.response = response
def json(self):
return self.response
@patch("chaostoolkit.check.requests", autospec=True)
def test_version_is_not_newer(requests):
requests.get.return_value = FakeResponse(
200,
"https://releases.chaostoolkit.org/latest",
{"version": __version__, "up_to_date": True}
)
latest_version = check_newer_version(command="init")
assert latest_version is None
@patch("chaostoolkit.check.requests", autospec=True)
def test_version_is_newer(requests):
newer_version = semver.bump_minor(__version__)
requests.get.return_value = FakeResponse(
200,
"http://someplace//usage/latest/",
{"version": __version__, "up_to_date": False}
)
latest_version = check_newer_version(command="init")
assert latest_version == __version__ | # -*- coding: utf-8 -*-
from unittest.mock import patch
import semver
from chaostoolkit import __version__
from chaostoolkit.check import check_newer_version
class FakeResponse:
def __init__(self, status=200, url=None, response=None):
self.status_code = status
self.url = url
self.response = response
def json(self):
return self.response
@patch("chaostoolkit.check.requests", autospec=True)
def test_version_is_not_newer(requests):
requests.get.return_value = FakeResponse(
200,
"https://releases.chaostoolkit.org/latest",
{"version": __version__, "up_to_date": True}
)
latest_version = check_newer_version(command="init")
assert latest_version is None
@patch("chaostoolkit.check.requests", autospec=True)
def test_version_is_newer(requests):
version = __version__.replace("rc", "-rc")
newer_version = semver.bump_minor(version)
requests.get.return_value = FakeResponse(
200,
"http://someplace//usage/latest/",
{"version": __version__, "up_to_date": False}
)
latest_version = check_newer_version(command="init")
assert latest_version == __version__ | Handle differences between semver and PEP440 | Handle differences between semver and PEP440
Signed-off-by: Sylvain Hellegouarch <16795633e2c1543064a3ad70ac3ba71d3d589b3b@defuze.org>
| Python | apache-2.0 | chaostoolkit/chaostoolkit | ---
+++
@@ -31,7 +31,8 @@
@patch("chaostoolkit.check.requests", autospec=True)
def test_version_is_newer(requests):
- newer_version = semver.bump_minor(__version__)
+ version = __version__.replace("rc", "-rc")
+ newer_version = semver.bump_minor(version)
requests.get.return_value = FakeResponse(
200,
"http://someplace//usage/latest/", |
d89747e26371b1986b4cec5a7514ba2c99480487 | tests/test_codec.py | tests/test_codec.py | from .common import *
from av.codec import Codec, Encoder, Decoder
class TestCodecs(TestCase):
def test_codec_mpeg4(self):
for cls in (Encoder, Decoder):
c = cls('mpeg4')
self.assertEqual(c.name, 'mpeg4')
self.assertEqual(c.long_name, 'MPEG-4 part 2')
self.assertEqual(c.type, 'video')
self.assertEqual(c.id, 13)
formats = c.video_formats
self.assertEqual(len(formats), 1)
self.assertEqual(formats[0].name, 'yuv420p')
| from .common import *
from av.codec import Codec, Encoder, Decoder
class TestCodecs(TestCase):
def test_codec_mpeg4(self):
for cls in (Encoder, Decoder):
c = cls('mpeg4')
self.assertEqual(c.name, 'mpeg4')
self.assertEqual(c.long_name, 'MPEG-4 part 2')
self.assertEqual(c.type, 'video')
self.assertEqual(c.id, 13)
formats = c.video_formats
self.assertTrue(formats)
self.assertTrue(any(f.name == 'yuv420p' for f in formats))
| Allow codec test to have more than just the one format | Allow codec test to have more than just the one format
| Python | bsd-3-clause | mcpv/PyAV,danielballan/PyAV,pupil-labs/PyAV,PyAV-Org/PyAV,markreidvfx/PyAV,PyAV-Org/PyAV,xxr3376/PyAV,pupil-labs/PyAV,pupil-labs/PyAV,xxr3376/PyAV,mikeboers/PyAV,markreidvfx/PyAV,danielballan/PyAV,danielballan/PyAV,xxr3376/PyAV,markreidvfx/PyAV,mcpv/PyAV,mikeboers/PyAV,mcpv/PyAV,pupil-labs/PyAV | ---
+++
@@ -13,5 +13,5 @@
self.assertEqual(c.id, 13)
formats = c.video_formats
- self.assertEqual(len(formats), 1)
- self.assertEqual(formats[0].name, 'yuv420p')
+ self.assertTrue(formats)
+ self.assertTrue(any(f.name == 'yuv420p' for f in formats)) |
82380cc6631ae91e0ef961e3bdfde70b9710af0f | reddit_liveupdate/activity.py | reddit_liveupdate/activity.py | from r2.lib import amqp, websockets
from reddit_liveupdate.models import ActiveVisitorsByLiveUpdateEvent
def broadcast_update():
event_ids = ActiveVisitorsByLiveUpdateEvent._cf.get_range(
column_count=1, filter_empty=False)
for event_id, is_active in event_ids:
if is_active:
count, is_fuzzed = ActiveVisitorsByLiveUpdateEvent.get_count(
event_id, cached=False)
else:
count, is_fuzzed = 0, False
payload = {
"count": count,
"fuzzed": is_fuzzed,
}
websockets.send_broadcast(
"/live/" + event_id, type="activity", payload=payload)
amqp.worker.join()
| from r2.lib import amqp, websockets
from reddit_liveupdate.models import ActiveVisitorsByLiveUpdateEvent
def broadcast_update():
event_ids = ActiveVisitorsByLiveUpdateEvent._cf.get_range(
column_count=1, filter_empty=False)
for event_id, is_active in event_ids:
if is_active:
count, is_fuzzed = ActiveVisitorsByLiveUpdateEvent.get_count(
event_id, cached=False)
else:
count, is_fuzzed = 0, False
payload = {
"count": count,
"fuzzed": is_fuzzed,
}
websockets.send_broadcast(
"/live/" + event_id, type="activity", payload=payload)
# ensure that all the amqp messages we've put on the worker's queue are
# sent before we allow this script to exit.
amqp.worker.join()
| Add comment to amqp.worker.join call. | Add comment to amqp.worker.join call.
| Python | bsd-3-clause | florenceyeun/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate | ---
+++
@@ -22,4 +22,6 @@
websockets.send_broadcast(
"/live/" + event_id, type="activity", payload=payload)
+ # ensure that all the amqp messages we've put on the worker's queue are
+ # sent before we allow this script to exit.
amqp.worker.join() |
6fee21a630a9ba3b54f58152cb4549b4170b833f | docdata/urls.py | docdata/urls.py | from django.conf.urls.defaults import *
urlpatterns = patterns('docdata.views',
# Status change notifications
url(r'^status_change/$', 'status_change', name='status_change'),
) | from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('docdata.views',
# Status change notifications
url(r'^status_change/$', 'status_change', name='status_change'),
)
| Fix URL's to work with Django 1.5 | Fix URL's to work with Django 1.5 | Python | agpl-3.0 | dokterbob/django-docdata | ---
+++
@@ -1,4 +1,4 @@
-from django.conf.urls.defaults import *
+from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('docdata.views', |
3f2d27f63c1cfe2cc4616a4314420fa23daca487 | django_lightweight_queue/task.py | django_lightweight_queue/task.py | from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
# Allow us to override which queue at the last moment
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
job = Job(self.path, args, kwargs, self.timeout, self.sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
| from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default', timeout=None, sigkill_on_stop=False):
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue, self.timeout, self.sigkill_on_stop)
class TaskWrapper(object):
def __init__(self, fn, queue, timeout, sigkill_on_stop):
self.fn = fn
self.queue = queue
self.timeout = timeout
self.sigkill_on_stop = sigkill_on_stop
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
# Allow us to override the default values dynamically
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
timeout = kwargs.pop('django_lightweight_queue_timeout', self.timeout)
sigkill_on_stop = kwargs.pop(
'django_lightweight_queue_sigkill_on_stop',
self.sigkill_on_stop,
)
job = Job(self.path, args, kwargs, timeout, sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue)
| Allow overriding timeout and sigkill_on_stop too. | Allow overriding timeout and sigkill_on_stop too.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
| Python | bsd-3-clause | thread/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue | ---
+++
@@ -27,10 +27,15 @@
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
- # Allow us to override which queue at the last moment
+ # Allow us to override the default values dynamically
queue = kwargs.pop('django_lightweight_queue_queue', self.queue)
+ timeout = kwargs.pop('django_lightweight_queue_timeout', self.timeout)
+ sigkill_on_stop = kwargs.pop(
+ 'django_lightweight_queue_sigkill_on_stop',
+ self.sigkill_on_stop,
+ )
- job = Job(self.path, args, kwargs, self.timeout, self.sigkill_on_stop)
+ job = Job(self.path, args, kwargs, timeout, sigkill_on_stop)
job.validate()
get_backend().enqueue(job, queue) |
2c013f4fd30e93dc50f844a6c507b6b9f7d1c80e | doc/users/figures/background2.py | doc/users/figures/background2.py | from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
# setup Lambert Conformal basemap.
# set resolution=None to skip processing of boundary datasets.
m = Basemap(width=12000000,height=9000000,projection='lcc',
resolution=None,lat_1=45.,lat_2=55,lat_0=50,lon_0=-107.)
# draw a land-sea mask for a map background.
# lakes=True means plot inland lakes with ocean color.
m.drawlsmask(land_color='coral',ocean_color='aqua',lakes=True)
plt.show()
plt.show()
| from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
# setup Lambert Conformal basemap.
# set resolution=None to skip processing of boundary datasets.
m = Basemap(width=12000000,height=9000000,projection='lcc',
resolution=None,lat_1=45.,lat_2=55,lat_0=50,lon_0=-107.)
# draw a land-sea mask for a map background.
# lakes=True means plot inland lakes with ocean color.
m.drawlsmask(land_color='coral',ocean_color='aqua',lakes=True)
plt.show()
| Remove a redudant "plt.show()" statement | Remove a redudant "plt.show()" statement
| Python | mit | matplotlib/basemap,guziy/basemap,guziy/basemap,matplotlib/basemap | ---
+++
@@ -8,4 +8,3 @@
# lakes=True means plot inland lakes with ocean color.
m.drawlsmask(land_color='coral',ocean_color='aqua',lakes=True)
plt.show()
-plt.show() |
542f0d6f3091e1cbc3ab0563d2915a9ac80c3c91 | edmunds/exceptions/handler.py | edmunds/exceptions/handler.py |
from werkzeug.exceptions import HTTPException
import sys
from six import reraise
class Handler(object):
"""
The Exception handler
"""
def __init__(self, app):
"""
Initiate
:param app: The application
:type app: Edmunds.Application
"""
self.app = app
self.dont_report = []
def report(self, exception):
"""
Report the exception
:param exception: The exception
:type exception: Exception
:return: Should report
"""
if exception.__class__ in self.dont_report:
return False
return True
def render(self, exception):
"""
Render the exception
:param exception: The exception
:type exception: Exception
:return: The response
"""
# Determine status code
status_code = 500
if isinstance(exception, HTTPException):
status_code = exception.code
is_error_code = status_code - (status_code % 100) == 500
if self.app.debug and is_error_code:
if sys.version_info < (3, 0):
exc_type, exc_value, tb = sys.exc_info()
if exc_value is exception:
reraise(exc_type, exc_value, tb)
raise exception
elif isinstance(exception, HTTPException):
return exception.get_response()
elif self.app.testing and is_error_code:
return str(status_code), '%s' % exception
else:
return str(status_code), status_code
|
from werkzeug.exceptions import HTTPException
import sys
from six import reraise
class Handler(object):
"""
The Exception handler
"""
def __init__(self, app):
"""
Initiate
:param app: The application
:type app: Edmunds.Application
"""
self.app = app
self.dont_report = []
def report(self, exception):
"""
Report the exception
:param exception: The exception
:type exception: Exception
:return: Should report
"""
if exception.__class__ in self.dont_report:
return False
return True
def render(self, exception):
"""
Render the exception
:param exception: The exception
:type exception: Exception
:return: The response
"""
# Determine status code
status_code = 500
if isinstance(exception, HTTPException):
status_code = exception.code
is_error_code = status_code - (status_code % 100) == 500
if self.app.debug and is_error_code:
if sys.version_info < (3, 0):
exc_type, exc_value, tb = sys.exc_info()
if exc_value is exception:
reraise(exc_type, exc_value, tb)
raise exception
elif isinstance(exception, HTTPException):
return exception.get_response()
elif self.app.testing and is_error_code:
return '%s' % exception, status_code
else:
return str(status_code), status_code
| Fix response for testing errors | Fix response for testing errors
| Python | apache-2.0 | LowieHuyghe/edmunds | ---
+++
@@ -55,6 +55,6 @@
elif isinstance(exception, HTTPException):
return exception.get_response()
elif self.app.testing and is_error_code:
- return str(status_code), '%s' % exception
+ return '%s' % exception, status_code
else:
return str(status_code), status_code |
442567a959b9fd5796de2f13154f66cdb25534b3 | python/default_crab_config.py | python/default_crab_config.py | __author__ = 'sbrochet'
def create_config(is_mc):
"""
Create a default CRAB configuration suitable to run the framework
:return:
"""
from CRABClient.UserUtilities import config, getUsernameFromSiteDB
config = config()
config.General.workArea = 'tasks'
config.General.transferOutputs = True
config.General.transferLogs = True
config.JobType.pluginName = 'Analysis'
config.JobType.disableAutomaticOutputCollection = True
config.JobType.outputFiles = []
config.JobType.allowUndistributedCMSSW = True
config.JobType.sendExternalFolder = True # To send electron MVA ids with jobs
config.Data.inputDBS = 'global'
if is_mc:
config.Data.splitting = 'FileBased'
else:
config.Data.splitting = 'LumiBased'
config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
config.Data.publication = False
config.Site.storageSite = 'T2_BE_UCL'
return config
| __author__ = 'sbrochet'
def create_config(is_mc):
"""
Create a default CRAB configuration suitable to run the framework
:return:
"""
from CRABClient.UserUtilities import config, getUsernameFromSiteDB
config = config()
config.General.workArea = 'tasks'
config.General.transferOutputs = True
config.General.transferLogs = True
config.JobType.pluginName = 'Analysis'
config.JobType.disableAutomaticOutputCollection = True
config.JobType.outputFiles = []
config.JobType.allowUndistributedCMSSW = True
config.JobType.sendExternalFolder = True # To send electron MVA ids with jobs
config.Data.inputDBS = 'global'
config.Data.allowNonValidInputDataset = True
if is_mc:
config.Data.splitting = 'FileBased'
else:
config.Data.splitting = 'LumiBased'
config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
config.Data.publication = False
config.Site.storageSite = 'T2_BE_UCL'
return config
| Allow crab to run on PRODUCTION datasets | Allow crab to run on PRODUCTION datasets
| Python | mit | cp3-llbb/GridIn,cp3-llbb/GridIn | ---
+++
@@ -20,6 +20,7 @@
config.JobType.sendExternalFolder = True # To send electron MVA ids with jobs
config.Data.inputDBS = 'global'
+ config.Data.allowNonValidInputDataset = True
if is_mc:
config.Data.splitting = 'FileBased' |
5834fd76b74650366eb73c759541116cfbbfcbbe | radar/web/template_filters.py | radar/web/template_filters.py | from jinja2 import escape, Markup, evalcontextfilter
from radar.lib.utils import date_to_datetime, is_date
def strftime(dt, dt_format):
if dt is None:
return ''
else:
return dt.strftime(dt_format)
def year_format(dt):
if dt is None:
return ''
else:
return '%04d' % dt.year
def date_format(dt):
if dt is None:
return ''
else:
return '%02d/%02d/%04d' % (dt.day, dt.month, dt.year)
def datetime_format(dt, seconds=False):
if dt is None:
return ''
else:
if is_date(dt):
dt = date_to_datetime(dt)
output = '%02d/%02d/%04d %02d:%02d' % (dt.day, dt.month, dt.year, dt.hour, dt.minute)
if seconds:
output += ':%02d' % dt.second
return output
@evalcontextfilter
def nl2br(eval_ctx, value):
value = escape(value)
value = value.replace('\n', Markup('<br />\n'))
if eval_ctx.autoescape:
value = Markup(value)
return value
def missing(value):
if value is None or value == '':
return '-'
else:
return value
def yn(value):
if value is None:
return '-'
elif value:
return 'Yes'
else:
return 'No'
| from jinja2 import escape, Markup, evalcontextfilter
from radar.lib.utils import date_to_datetime, is_date
def strftime(dt, dt_format):
if dt is None:
return ''
else:
return dt.strftime(dt_format)
def year_format(dt):
if dt is None:
return ''
else:
return '%04d' % dt.year
def date_format(dt):
if dt is None:
return ''
else:
return '%02d/%02d/%04d' % (dt.day, dt.month, dt.year)
def datetime_format(dt, seconds=False):
if dt is None:
return ''
else:
if is_date(dt):
dt = date_to_datetime(dt)
output = '%02d/%02d/%04d %02d:%02d' % (dt.day, dt.month, dt.year, dt.hour, dt.minute)
if seconds:
output += ':%02d' % dt.second
return output
@evalcontextfilter
def nl2br(eval_ctx, value):
if value is None:
return ''
value = escape(value)
value = value.replace('\n', Markup('<br />\n'))
if eval_ctx.autoescape:
value = Markup(value)
return value
def missing(value):
if value is None or value == '':
return '-'
else:
return value
def yn(value):
if value is None:
return '-'
elif value:
return 'Yes'
else:
return 'No'
| Update nl2br to work with None as input | Update nl2br to work with None as input
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | ---
+++
@@ -41,6 +41,9 @@
@evalcontextfilter
def nl2br(eval_ctx, value):
+ if value is None:
+ return ''
+
value = escape(value)
value = value.replace('\n', Markup('<br />\n'))
|
8c07f3decfc5fb556c9818172e6b7749d31eca37 | purchase_open_qty/__manifest__.py | purchase_open_qty/__manifest__.py | # Copyright 2017 ForgeFlow S.L.
# (http://www.forgeflow.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "Purchase Open Qty",
"summary": "Allows to identify the purchase orders that have quantities "
"pending to invoice or to receive.",
"version": "13.0.1.0.1",
"author": "ForgeFlow, " "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Purchases",
"depends": ["purchase_stock"],
"data": ["views/purchase_view.xml"],
"pre_init_hook": "pre_init_hook",
"license": "AGPL-3",
"installable": True,
"application": False,
}
| # Copyright 2017 ForgeFlow S.L.
# (http://www.forgeflow.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "Purchase Open Qty",
"summary": "Allows to identify the purchase orders that have quantities "
"pending to invoice or to receive.",
"version": "13.0.1.0.1",
"author": "ForgeFlow, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Purchases",
"depends": ["purchase_stock"],
"data": ["views/purchase_view.xml"],
"pre_init_hook": "pre_init_hook",
"license": "AGPL-3",
"installable": True,
"application": False,
}
| Delete empty " " spaces in same string line | [FIX] Delete empty " " spaces in same string line
| Python | agpl-3.0 | OCA/purchase-workflow,OCA/purchase-workflow | ---
+++
@@ -7,7 +7,7 @@
"summary": "Allows to identify the purchase orders that have quantities "
"pending to invoice or to receive.",
"version": "13.0.1.0.1",
- "author": "ForgeFlow, " "Odoo Community Association (OCA)",
+ "author": "ForgeFlow, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/purchase-workflow",
"category": "Purchases",
"depends": ["purchase_stock"], |
66039d5238c7c18156f6a5bbbe8c28232bb65483 | example/main.py | example/main.py | #!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def get(self, request_args=None):
data = {'title': 'Nacho Application Server'}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls) | #!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application, StaticFile
class Home(Application):
def get(self, request_args=None):
data = {'title': 'Nacho Application Server'}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/static/',
StaticFile('/Users/avelino/projects/nacho/example/'))
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls) | Add sample used StaticFile server | Add sample used StaticFile server
| Python | mit | beni55/nacho,beni55/nacho,avelino/nacho | ---
+++
@@ -7,7 +7,7 @@
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
-from nacho.app import Application
+from nacho.app import Application, StaticFile
class Home(Application):
@@ -18,6 +18,8 @@
def urls():
router = Router()
+ router.add_handler('/static/',
+ StaticFile('/Users/avelino/projects/nacho/example/'))
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
|
50c44a5708d1c054207eba264e1cdf9d1f6718da | deployer/logger.py | deployer/logger.py | from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
| from __future__ import absolute_import
import logging
from logging.handlers import SysLogHandler
from conf.appconfig import LOG_FORMAT, LOG_DATE, LOG_ROOT_LEVEL, TOTEM_ENV, \
LOG_IDENTIFIER
def init_logging(name=None):
app_logger = logging.getLogger(name)
app_logger.setLevel(LOG_ROOT_LEVEL)
app_logger.propagate = False
if TOTEM_ENV == 'local':
formatter = logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
app_logger.addHandler(handler)
else:
formatter = logging.Formatter(
'{0}[%(process)d]: %(name)s: %(message)s'
.format(LOG_IDENTIFIER))
handler = logging.handlers.SysLogHandler(
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
handler.setLevel(LOG_ROOT_LEVEL)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger
def init_celery_logging(*args, **kwargs):
init_logging('celery')
| Set log level for handler | Set log level for handler
| Python | mit | totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer | ---
+++
@@ -24,7 +24,7 @@
address='/dev/log',
facility=SysLogHandler.LOG_DAEMON)
handler.setFormatter(formatter)
- handler.setLevel(logging.INFO)
+ handler.setLevel(LOG_ROOT_LEVEL)
app_logger.addHandler(handler)
app_logger.info('Logger initialized')
return app_logger |
01d812f83c5526cc304f8d691ce9203d3e95633a | sampleproj/settings/travis.py | sampleproj/settings/travis.py | """
Django settings for travis-ci builds.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
from __future__ import absolute_import
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'travis-xxxxxxxxxxxxxxxx'
| """
Django settings for travis-ci builds.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
from __future__ import absolute_import
from .base import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'travis-xxxxxxxxxxxxxxxx'
#Emails
MDOT_HELP_EMAIL = 'test@testcase.edu' # String for help desk email address
MDOT_UX_EMAIL = 'test@testcase.edu' # String for UX team email address
MDOT_FORM_EMAIL = 'test@testcase.edu' # String to email app publishing requests
| Add dummy email addresses for unit tests. | Add dummy email addresses for unit tests.
| Python | apache-2.0 | charlon/mdot,uw-it-aca/mdot,uw-it-aca/mdot,charlon/mdot,uw-it-aca/mdot,uw-it-aca/mdot,charlon/mdot | ---
+++
@@ -12,3 +12,8 @@
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'travis-xxxxxxxxxxxxxxxx'
+
+#Emails
+MDOT_HELP_EMAIL = 'test@testcase.edu' # String for help desk email address
+MDOT_UX_EMAIL = 'test@testcase.edu' # String for UX team email address
+MDOT_FORM_EMAIL = 'test@testcase.edu' # String to email app publishing requests |
0179e6680efca8a9a6a5f05db703da1ce7447b3e | flexget/__init__.py | flexget/__init__.py | #!/usr/bin/python
import os
import sys
import logging
from flexget import logger
from flexget.options import CoreOptionParser
from flexget import plugin
from flexget.manager import Manager
__version__ = '{subversion}'
log = logging.getLogger('main')
def main():
"""Main entry point for Command Line Interface"""
logger.initialize()
parser = CoreOptionParser()
plugin.load_plugins(parser)
options = parser.parse_args()[0]
try:
manager = Manager(options)
except IOError, e:
# failed to load config, TODO: why should it be handled here? So sys.exit isn't called in webui?
log.exception(e)
logger.flush_logging_to_console()
sys.exit(1)
log_level = logging.getLevelName(options.loglevel.upper())
log_file = os.path.expanduser(manager.options.logfile)
# If an absolute path is not specified, use the config directory.
if not os.path.isabs(log_file):
log_file = os.path.join(manager.config_base, log_file)
logger.start(log_file, log_level)
if options.profile:
try:
import cProfile as profile
except ImportError:
import profile
profile.runctx('manager.execute()', globals(), locals(), os.path.join(manager.config_base, 'flexget.profile'))
else:
manager.execute()
| #!/usr/bin/python
import os
import sys
import logging
from flexget import logger
from flexget.options import CoreOptionParser
from flexget import plugin
from flexget.manager import Manager
__version__ = '{subversion}'
log = logging.getLogger('main')
def main():
"""Main entry point for Command Line Interface"""
logger.initialize()
parser = CoreOptionParser()
plugin.load_plugins(parser)
options = parser.parse_args()[0]
try:
manager = Manager(options)
except IOError, e:
# failed to load config, TODO: why should it be handled here? So sys.exit isn't called in webui?
log.critical(e)
logger.flush_logging_to_console()
sys.exit(1)
log_level = logging.getLevelName(options.loglevel.upper())
log_file = os.path.expanduser(manager.options.logfile)
# If an absolute path is not specified, use the config directory.
if not os.path.isabs(log_file):
log_file = os.path.join(manager.config_base, log_file)
logger.start(log_file, log_level)
if options.profile:
try:
import cProfile as profile
except ImportError:
import profile
profile.runctx('manager.execute()', globals(), locals(), os.path.join(manager.config_base, 'flexget.profile'))
else:
manager.execute()
| Fix traceback when config file is not found. | Fix traceback when config file is not found.
git-svn-id: ad91b9aa7ba7638d69f912c9f5d012e3326e9f74@2541 3942dd89-8c5d-46d7-aeed-044bccf3e60c
| Python | mit | Danfocus/Flexget,tobinjt/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,sean797/Flexget,vfrc2/Flexget,asm0dey/Flexget,vfrc2/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,v17al/Flexget,qvazzler/Flexget,jawilson/Flexget,oxc/Flexget,dsemi/Flexget,qvazzler/Flexget,asm0dey/Flexget,Pretagonist/Flexget,qk4l/Flexget,OmgOhnoes/Flexget,tarzasai/Flexget,drwyrm/Flexget,tsnoam/Flexget,Danfocus/Flexget,drwyrm/Flexget,ianstalk/Flexget,antivirtel/Flexget,camon/Flexget,ZefQ/Flexget,LynxyssCZ/Flexget,tsnoam/Flexget,jacobmetrick/Flexget,offbyone/Flexget,qk4l/Flexget,ibrahimkarahan/Flexget,oxc/Flexget,Danfocus/Flexget,tobinjt/Flexget,Flexget/Flexget,crawln45/Flexget,Pretagonist/Flexget,patsissons/Flexget,Danfocus/Flexget,poulpito/Flexget,spencerjanssen/Flexget,jacobmetrick/Flexget,jawilson/Flexget,Flexget/Flexget,qk4l/Flexget,voriux/Flexget,spencerjanssen/Flexget,ZefQ/Flexget,antivirtel/Flexget,crawln45/Flexget,malkavi/Flexget,tobinjt/Flexget,qvazzler/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,vfrc2/Flexget,JorisDeRieck/Flexget,tarzasai/Flexget,ianstalk/Flexget,Flexget/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,cvium/Flexget,tobinjt/Flexget,tvcsantos/Flexget,cvium/Flexget,JorisDeRieck/Flexget,cvium/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,jacobmetrick/Flexget,thalamus/Flexget,dsemi/Flexget,v17al/Flexget,lildadou/Flexget,ianstalk/Flexget,ratoaq2/Flexget,gazpachoking/Flexget,ibrahimkarahan/Flexget,Flexget/Flexget,offbyone/Flexget,LynxyssCZ/Flexget,ibrahimkarahan/Flexget,patsissons/Flexget,poulpito/Flexget,ZefQ/Flexget,v17al/Flexget,grrr2/Flexget,X-dark/Flexget,xfouloux/Flexget,tvcsantos/Flexget,oxc/Flexget,spencerjanssen/Flexget,poulpito/Flexget,malkavi/Flexget,thalamus/Flexget,grrr2/Flexget,lildadou/Flexget,ratoaq2/Flexget,antivirtel/Flexget,camon/Flexget,xfouloux/Flexget,tarzasai/Flexget,xfouloux/Flexget,JorisDeRieck/Flexget,sean797/Flexget,thalamus/Flexget,dsemi/Flexget,grrr2/Flexget,crawln45/Flexget,X-dark/Flexget,patsissons/Flexget,Pretagonist/Flexget,jawilson/Flexg
et,voriux/Flexget,tsnoam/Flexget,sean797/Flexget,asm0dey/Flexget,X-dark/Flexget,drwyrm/Flexget,ratoaq2/Flexget,lildadou/Flexget,offbyone/Flexget | ---
+++
@@ -27,7 +27,7 @@
manager = Manager(options)
except IOError, e:
# failed to load config, TODO: why should it be handled here? So sys.exit isn't called in webui?
- log.exception(e)
+ log.critical(e)
logger.flush_logging_to_console()
sys.exit(1)
|
e0f80de15d1ddabc3bd47d6396edbaaac5a08041 | examples/motion_example.py | examples/motion_example.py | #!/usr/bin/env python3
"""This example shows how to use the Motion Click wrapper of the LetMeCreate
library.
Whenever the motion click detects an event, it flashes all LED's ten times.
The user must press Ctrl+C to terminate the program.
The Motion Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core.common import MIKROBUS_1
from letmecreate.core import led
from letmecreate.click import motion
def flash_leds(arg):
# Only flash LED's when motion starts getting detected.
if arg != 1:
return
for i in range(10):
led.switch_on(led.ALL_LEDS)
sleep(0.1)
led.switch_off(led.ALL_LEDS)
sleep(0.1)
led.init()
motion.enable(MIKROBUS_1)
motion.attach_callback(MIKROBUS_1, flash_leds)
print("LED's will flash when Motion Click detects a movement.\n")
print("Press Ctrl+C to quit.\n")
while True:
pass
| #!/usr/bin/env python3
"""This example shows how to use the Motion Click wrapper of the LetMeCreate
library.
Whenever the motion click detects an event, it flashes all LED's ten times.
The user must press Ctrl+C to terminate the program.
The Motion Click must be inserted in Mikrobus 1 before running this program.
"""
from letmecreate.core.common import MIKROBUS_1
from letmecreate.core import led
from letmecreate.click import motion
from time import sleep
def flash_leds(arg):
# Only flash LED's when motion starts getting detected.
if arg != 1:
return
for i in range(10):
led.switch_on(led.ALL_LEDS)
sleep(0.1)
led.switch_off(led.ALL_LEDS)
sleep(0.1)
led.init()
motion.enable(MIKROBUS_1)
motion.attach_callback(MIKROBUS_1, flash_leds)
print("LED's will flash when Motion Click detects a movement.\n")
print("Press Ctrl+C to quit.\n")
while True:
pass
| Add missing import for sleep | motion: Add missing import for sleep
Signed-off-by: Francois Berder <59eaf4bb0211c66c3d7532da6d77ecf42a779d82@outlook.fr>
| Python | bsd-3-clause | francois-berder/PyLetMeCreate | ---
+++
@@ -11,6 +11,7 @@
from letmecreate.core.common import MIKROBUS_1
from letmecreate.core import led
from letmecreate.click import motion
+from time import sleep
def flash_leds(arg): |
14d9b5fd2e24245ac3333d1a1fe6a4a9fd33751a | example/example.py | example/example.py | from collections import OrderedDict
import os
import sys
from plumbium import call, record, pipeline, recorders
@record()
def pipeline_stage_1():
call([os.path.expanduser('~/programming/Plumbium/example/example_script.sh')])
@record()
def pipeline_stage_2():
call([os.path.expanduser('~/programming/Plumbium/example/example_script2.sh')])
def my_pipeline():
pipeline_stage_1()
pipeline_stage_2()
def example_pipeline():
csvfile = recorders.CSVFile(
'csv_results.csv',
OrderedDict([
('subject', lambda x: x['metadata']['subject']),
('start_date', lambda x: x['start_date']),
('data_val', lambda x: x['processes'][-1]['printed_output'].strip().split(':')[1])
])
)
pipeline.run('example', my_pipeline, sys.argv[1], metadata={'subject': 1}, recorder=csvfile)
if __name__ == '__main__':
example_pipeline()
| from collections import OrderedDict
import os
import sys
from plumbium import call, record, pipeline
from plumbium.recorders import CSVFile
@record()
def pipeline_stage_1():
call(['echo', 'foo'])
@record()
def pipeline_stage_2():
call(['echo', 'data: 55'])
def my_pipeline():
pipeline_stage_1()
pipeline_stage_2()
def example_pipeline():
csvfile = CSVFile(
'csv_results.csv',
OrderedDict([
('subject', lambda x: x['metadata']['subject']),
('start_date', lambda x: x['start_date']),
('data_val', lambda x: x['processes'][-1]['printed_output'].strip().split(':')[1])
])
)
pipeline.run('example', my_pipeline, sys.argv[1], metadata={'subject': 1}, recorder=csvfile)
if __name__ == '__main__':
example_pipeline()
| Update to work with recorders submodule | Update to work with recorders submodule
| Python | mit | jstutters/Plumbium | ---
+++
@@ -1,17 +1,18 @@
from collections import OrderedDict
import os
import sys
-from plumbium import call, record, pipeline, recorders
+from plumbium import call, record, pipeline
+from plumbium.recorders import CSVFile
@record()
def pipeline_stage_1():
- call([os.path.expanduser('~/programming/Plumbium/example/example_script.sh')])
+ call(['echo', 'foo'])
@record()
def pipeline_stage_2():
- call([os.path.expanduser('~/programming/Plumbium/example/example_script2.sh')])
+ call(['echo', 'data: 55'])
def my_pipeline():
@@ -20,7 +21,7 @@
def example_pipeline():
- csvfile = recorders.CSVFile(
+ csvfile = CSVFile(
'csv_results.csv',
OrderedDict([
('subject', lambda x: x['metadata']['subject']), |
3c573e2b02a18627b82f4a25fef67adae295d653 | rbm2m/models/setting.py | rbm2m/models/setting.py | # -*- coding: utf-8 -*-
from sqlalchemy import Column, String
from .base import Base
class Setting(Base):
__tablename__ = 'settings'
name = Column(String(32), nullable=False, primary_key=True)
value = Column(String(512))
default_value = Column(String(512))
title = Column(String(127), nullable=False)
data_type = Column(String(8)) # string or text for now
description = Column(String(512))
| # -*- coding: utf-8 -*-
from sqlalchemy import Column, String, Text
from .base import Base
class Setting(Base):
__tablename__ = 'settings'
name = Column(String(32), nullable=False, primary_key=True)
value = Column(Text)
default_value = Column(Text)
title = Column(String(127), nullable=False)
data_type = Column(String(8)) # string or text for now
description = Column(String(512))
| Set setings.value type to text | Set setings.value type to text
| Python | apache-2.0 | notapresent/rbm2m,notapresent/rbm2m | ---
+++
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-from sqlalchemy import Column, String
+from sqlalchemy import Column, String, Text
from .base import Base
@@ -8,8 +8,8 @@
__tablename__ = 'settings'
name = Column(String(32), nullable=False, primary_key=True)
- value = Column(String(512))
- default_value = Column(String(512))
+ value = Column(Text)
+ default_value = Column(Text)
title = Column(String(127), nullable=False)
data_type = Column(String(8)) # string or text for now
description = Column(String(512)) |
7f57e850619f0a4ca4f63aa26234ce3fba8b9cf0 | reference/gittaggers.py | reference/gittaggers.py | from setuptools.command.egg_info import egg_info
import subprocess
import time
class EggInfoFromGit(egg_info):
"""Tag the build with git commit timestamp.
If a build tag has already been set (e.g., "egg_info -b", building
from source package), leave it alone.
"""
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
'--format=format:%ct', '.']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self):
if self.tag_build is None:
self.tag_build = self.git_timestamp_tag()
return egg_info.tags(self)
| from setuptools.command.egg_info import egg_info
import subprocess
import time
class EggInfoFromGit(egg_info):
"""Tag the build with git commit timestamp.
If a build tag has already been set (e.g., "egg_info -b", building
from source package), leave it alone.
"""
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
'--format=format:%ct', '..']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self):
if self.tag_build is None:
self.tag_build = self.git_timestamp_tag()
return egg_info.tags(self)
| Fix Python packaging to use correct git log for package time/version stamps. | Fix Python packaging to use correct git log for package time/version stamps.
| Python | apache-2.0 | foreveremain/common-workflow-language,SciDAP/cwltool,brainstorm/common-workflow-language,dleehr/common-workflow-language,dleehr/cwltool,guillermo-carrasco/common-workflow-language,dleehr/cwltool,ohsu-computational-biology/common-workflow-language,StarvingMarvin/common-workflow-language,satra/common-workflow-language,mr-c/common-workflow-language,satra/common-workflow-language,jeremiahsavage/cwltool,chapmanb/cwltool,hmenager/common-workflow-language,dleehr/cwltool,StarvingMarvin/common-workflow-language,common-workflow-language/cwltool,common-workflow-language/common-workflow-language,guillermo-carrasco/common-workflow-language,SciDAP/cwltool,dleehr/common-workflow-language,hmenager/common-workflow-language,common-workflow-language/cwltool,hmenager/common-workflow-language,slnovak/common-workflow-language,curoverse/common-workflow-language,common-workflow-language/common-workflow-language,foreveremain/common-workflow-language,common-workflow-language/cwltool,ohsu-computational-biology/common-workflow-language,stain/common-workflow-language,jeremiahsavage/cwltool,common-workflow-language/common-workflow-language,chapmanb/cwltool,SciDAP/cwltool,chapmanb/cwltool,brainstorm/common-workflow-language,guillermo-carrasco/common-workflow-language,common-workflow-language/common-workflow-language,mr-c/common-workflow-language,stain/common-workflow-language,jeremiahsavage/cwltool,StarvingMarvin/common-workflow-language,dleehr/common-workflow-language,dleehr/cwltool,StarvingMarvin/common-workflow-language,slnovak/common-workflow-language,slnovak/common-workflow-language,ohsu-computational-biology/common-workflow-language,mr-c/common-workflow-language,SciDAP/cwltool,stain/common-workflow-language,dleehr/common-workflow-language,jeremiahsavage/cwltool,stain/common-workflow-language,hmenager/common-workflow-language,satra/common-workflow-language,curoverse/common-workflow-language,brainstorm/common-workflow-language,chapmanb/cwltool,foreveremain/common-workf
low-language | ---
+++
@@ -11,7 +11,7 @@
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
- '--format=format:%ct', '.']).strip()
+ '--format=format:%ct', '..']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self): |
599ec99b6f57e37f7f4009afb9498abffd70ff34 | grammpy_transforms/SplittedRules/splitted_rules.py | grammpy_transforms/SplittedRules/splitted_rules.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.12.2017 16:05
:Licence GNUv3
Part of transofmer
"""
from grammpy import Nonterminal
def splitted_rules(root: Nonterminal):
return root
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.12.2017 16:05
:Licence GNUv3
Part of transofmer
"""
from grammpy import Nonterminal, Rule, EPSILON
from grammpy.Grammars.MultipleRulesGrammar import SplitRule
class Adding:
def __init__(self, rule: Rule):
self.rule = rule
self.processed = False
def process(self):
child_symbols = self.rule.to_symbols
self.processed = True
child_rules = []
for child in child_symbols: # type: Nonterminal
if child.to_rule is not None:
child_rules.append(child.to_rule)
return child_rules
def splitted_rules(root: Nonterminal):
stack = list()
stack.append(Adding(root.to_rule))
while len(stack) > 0:
proc = stack.pop() # type: Adding
if not proc.processed:
add = proc.process()
stack.append(proc)
for a in add:
stack.append(Adding(a))
elif isinstance(proc.rule, SplitRule):
created_rule = proc.rule.from_rule() # type: Rule
#Solve parents
for s in proc.rule.from_symbols: # type: Nonterminal
s._set_to_rule(created_rule)
created_rule._from_symbols.append(s)
#Solve childs
for ch in proc.rule.to_symbols:
ch._set_from_rule(created_rule)
created_rule.to_symbols.append(ch)
stack.append(Adding(created_rule))
return root
| Add implementation of splitted rules | Add implementation of splitted rules
| Python | mit | PatrikValkovic/grammpy | ---
+++
@@ -7,8 +7,43 @@
"""
-from grammpy import Nonterminal
+from grammpy import Nonterminal, Rule, EPSILON
+from grammpy.Grammars.MultipleRulesGrammar import SplitRule
+class Adding:
+ def __init__(self, rule: Rule):
+ self.rule = rule
+ self.processed = False
+
+ def process(self):
+ child_symbols = self.rule.to_symbols
+ self.processed = True
+ child_rules = []
+ for child in child_symbols: # type: Nonterminal
+ if child.to_rule is not None:
+ child_rules.append(child.to_rule)
+ return child_rules
+
def splitted_rules(root: Nonterminal):
+ stack = list()
+ stack.append(Adding(root.to_rule))
+ while len(stack) > 0:
+ proc = stack.pop() # type: Adding
+ if not proc.processed:
+ add = proc.process()
+ stack.append(proc)
+ for a in add:
+ stack.append(Adding(a))
+ elif isinstance(proc.rule, SplitRule):
+ created_rule = proc.rule.from_rule() # type: Rule
+ #Solve parents
+ for s in proc.rule.from_symbols: # type: Nonterminal
+ s._set_to_rule(created_rule)
+ created_rule._from_symbols.append(s)
+ #Solve childs
+ for ch in proc.rule.to_symbols:
+ ch._set_from_rule(created_rule)
+ created_rule.to_symbols.append(ch)
+ stack.append(Adding(created_rule))
return root |
1db16a65b114d514257d4525c41e0b3f74d1d479 | dataset/dataset/settings.py | dataset/dataset/settings.py | # Scrapy settings for dataset project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'dataset'
SPIDER_MODULES = ['dataset.spiders']
NEWSPIDER_MODULE = 'dataset.spiders'
ITEM_PIPELINES = {
'dataset.pipelines.DatasetPipeline': 100,
}
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'dataset (+http://www.yourdomain.com)'
| # Scrapy settings for dataset project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'dataset'
SPIDER_MODULES = ['dataset.spiders']
NEWSPIDER_MODULE = 'dataset.spiders'
ITEM_PIPELINES = {
'dataset.pipelines.DatasetPipeline': 100,
}
FEED_URI = 'file'
FEED_FORMAT = 'jsonlines'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'dataset (+http://www.yourdomain.com)'
| Enable Feed Exports and set to JSONLines | Enable Feed Exports and set to JSONLines
| Python | mit | MaxLikelihood/CODE | ---
+++
@@ -15,5 +15,8 @@
'dataset.pipelines.DatasetPipeline': 100,
}
+FEED_URI = 'file'
+FEED_FORMAT = 'jsonlines'
+
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'dataset (+http://www.yourdomain.com)' |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.