| commit (stringlengths 40–40) | old_file (stringlengths 4–118) | new_file (stringlengths 4–118) | old_contents (stringlengths 0–2.94k) | new_contents (stringlengths 1–4.43k) | subject (stringlengths 15–444) | message (stringlengths 16–3.45k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 5–43.2k) | prompt (stringlengths 17–4.58k) | response (stringlengths 1–4.43k) | prompt_tagged (stringlengths 58–4.62k) | response_tagged (stringlengths 1–4.43k) | text (stringlengths 132–7.29k) | text_tagged (stringlengths 173–7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aad810071f5b0a93f312a93d3bfa12271ae477ee
|
ext/avalon-gunicorn.py
|
ext/avalon-gunicorn.py
|
# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. Necessary when the server is scanning
# a music collection on start up to prevent multiple workers from
# stomping all over the database at the same time.
preload_app = True
|
# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. This will save us memory when using
# multiple worker processes since the OS will be able to take advantage
# of copy-on-write optimizations.
preload_app = True
|
Fix comment about why we use preload
|
Fix comment about why we use preload
|
Python
|
mit
|
tshlabs/avalonms
|
# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. Necessary when the server is scanning
# a music collection on start up to prevent multiple workers from
# stomping all over the database at the same time.
preload_app = True
Fix comment about why we use preload
|
# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. This will save us memory when using
# multiple worker processes since the OS will be able to take advantage
# of copy-on-write optimizations.
preload_app = True
|
<commit_before># Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. Necessary when the server is scanning
# a music collection on start up to prevent multiple workers from
# stomping all over the database at the same time.
preload_app = True
<commit_msg>Fix comment about why we use preload<commit_after>
|
# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. This will save us memory when using
# multiple worker processes since the OS will be able to take advantage
# of copy-on-write optimizations.
preload_app = True
|
# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. Necessary when the server is scanning
# a music collection on start up to prevent multiple workers from
# stomping all over the database at the same time.
preload_app = True
Fix comment about why we use preload# Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. This will save us memory when using
# multiple worker processes since the OS will be able to take advantage
# of copy-on-write optimizations.
preload_app = True
|
<commit_before># Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. Necessary when the server is scanning
# a music collection on start up to prevent multiple workers from
# stomping all over the database at the same time.
preload_app = True
<commit_msg>Fix comment about why we use preload<commit_after># Configuration for running the Avalon Music Server under Gunicorn
# http://docs.gunicorn.org
# Note that this configuration omits a bunch of features that Gunicorn
# has (such as running as a daemon, changing users, error and access
# logging) because it is designed to be used when running Gunicorn
# with supervisord and a separate public facing web server (such as
# Nginx).
# Bind the server to an address only accessible locally. We'll be
# running Nginx which will proxy to Gunicorn and act as the public-
# facing web server.
bind = 'localhost:8000'
# Use three workers in addition to the master process. Since the Avalon
# Music Server is largely CPU bound, you can increase the number of
# requests that can be handled by increasing this number (up to a point!).
# The Gunicorn docs recommend 2N + 1 where N is the number of CPUs you
# have.
workers = 3
# Make sure to load the application only in the main process before
# spawning the worker processes. This will save us memory when using
# multiple worker processes since the OS will be able to take advantage
# of copy-on-write optimizations.
preload_app = True
|
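A note on the `workers = 3` setting above: since Gunicorn config files are ordinary Python modules, the `2N + 1` recommendation from the Gunicorn docs can be computed at runtime rather than hard-coded. A minimal sketch, assuming nothing beyond the standard library:

```python
# Sketch only: derive the worker count from the machine's CPU count.
# This is valid in the same config file that sets `bind` and
# `preload_app`, since Gunicorn executes the config as plain Python.
import multiprocessing

# 2N + 1, per the recommendation quoted in the comments above.
workers = multiprocessing.cpu_count() * 2 + 1
```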
c53e296a9a548a3d817263c0d18e3e95f947665a
|
masters/master.client.webrtc.fyi/master_site_config.py
|
masters/master.client.webrtc.fyi/master_site_config.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8063
slave_port = 8163
master_port_alt = 8263
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8072
slave_port = 8172
master_port_alt = 8272
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
|
Change client.webrtc.fyi master ports as they conflict with master.client.quickoffice
|
Change client.webrtc.fyi master ports as they conflict with master.client.quickoffice
Review URL: https://chromiumcodereview.appspot.com/23608005
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@220055 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8063
slave_port = 8163
master_port_alt = 8263
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
Change client.webrtc.fyi master ports as they conflict with master.client.quickoffice
Review URL: https://chromiumcodereview.appspot.com/23608005
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@220055 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8072
slave_port = 8172
master_port_alt = 8272
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8063
slave_port = 8163
master_port_alt = 8263
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
<commit_msg>Change client.webrtc.fyi master ports as they conflict with master.client.quickoffice
Review URL: https://chromiumcodereview.appspot.com/23608005
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@220055 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8072
slave_port = 8172
master_port_alt = 8272
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8063
slave_port = 8163
master_port_alt = 8263
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
Change client.webrtc.fyi master ports as they conflict with master.client.quickoffice
Review URL: https://chromiumcodereview.appspot.com/23608005
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@220055 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8072
slave_port = 8172
master_port_alt = 8272
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8063
slave_port = 8163
master_port_alt = 8263
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
<commit_msg>Change client.webrtc.fyi master ports as they conflict with master.client.quickoffice
Review URL: https://chromiumcodereview.appspot.com/23608005
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@220055 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class WebRTCFYI(Master.Master3):
project_name = 'WebRTC FYI'
master_port = 8072
slave_port = 8172
master_port_alt = 8272
server_url = 'http://webrtc.googlecode.com'
project_url = 'http://webrtc.googlecode.com'
from_address = 'webrtc-cb-fyi-watchlist@google.com'
permitted_domains = ('google.com', 'chromium.org', 'webrtc.org')
|
29a1a39cf4f0fed6999bd787cce7e8e65c49ef4e
|
display_image.py
|
display_image.py
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
Set image to fill the entire matplotlib window
|
Set image to fill the entire matplotlib window
|
Python
|
mit
|
crowsonkb/style_transfer,crowsonkb/style_transfer,crowsonkb/style_transfer,crowsonkb/style_transfer
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
Set image to fill the entire matplotlib window
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
<commit_before>import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
<commit_msg>Set image to fill the entire matplotlib window<commit_after>
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
Set image to fill the entire matplotlib windowimport matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
<commit_before>import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
<commit_msg>Set image to fill the entire matplotlib window<commit_after>import matplotlib.pyplot as plt
class ImageWindow:
def __init__(self):
self.imsh = None
plt.ion()
plt.show()
def display(self, image):
if self.imsh is None or not plt.fignum_exists(self.imsh.figure.number):
self.imsh = plt.imshow(image, interpolation='nearest')
self.imsh.axes.axis('off')
self.imsh.axes.set_position((0, 0, 1, 1))
self.imsh.figure.canvas.draw()
else:
self.imsh.set_data(image)
plt.pause(1e-4)
|
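To see the effect of the added `set_position((0, 0, 1, 1))` call, a short driver like the following can be used; the random frames and loop count are purely illustrative and not part of the repository:

```python
# Illustrative driver for the ImageWindow class above.
import numpy as np

window = ImageWindow()
for _ in range(10):
    # With set_position((0, 0, 1, 1)) the image fills the whole figure
    # instead of leaving the usual axis margins around it.
    frame = np.random.rand(64, 64, 3)  # random RGB frame in [0, 1]
    window.display(frame)
```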
402056a272c94d3d28da62b08cac14ace18c835a
|
test/python_api/default-constructor/sb_address.py
|
test/python_api/default-constructor/sb_address.py
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
Add new API for SBAddress to the fuzz test:
|
Add new API for SBAddress to the fuzz test:
SetLoadAddress (lldb::addr_t load_addr,
lldb::SBTarget &target);
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@135793 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
Add new API for SBAddress to the fuzz test:
SetLoadAddress (lldb::addr_t load_addr,
lldb::SBTarget &target);
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@135793 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
<commit_before>"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
<commit_msg>Add new API for SBAddress to the fuzz test:
SetLoadAddress (lldb::addr_t load_addr,
lldb::SBTarget &target);
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@135793 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
Add new API for SBAddress to the fuzz test:
SetLoadAddress (lldb::addr_t load_addr,
lldb::SBTarget &target);
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@135793 91177308-0d34-0410-b5e6-96231b3b80d8"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
<commit_before>"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
<commit_msg>Add new API for SBAddress to the fuzz test:
SetLoadAddress (lldb::addr_t load_addr,
lldb::SBTarget &target);
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@135793 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
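The fuzz helper above is exercised by handing it a default-constructed object; a hedged sketch of such a driver follows (the real lldb test suite wires this up through its own harness, and the module import path is an assumption):

```python
# Hypothetical standalone driver for fuzz_obj above; requires lldb's
# Python bindings to be importable.
import lldb
import sb_address  # the module shown above, assumed to be on sys.path

obj = lldb.SBAddress()    # default construction is the point of the test
sb_address.fuzz_obj(obj)  # the calls inside must not crash lldb
```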
eb06650567fe94b65e6ccf55446982af746761af
|
cumulusci/tasks/salesforce/communities_preflights.py
|
cumulusci/tasks/salesforce/communities_preflights.py
|
import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
if r.status_code != 200:
self.return_values = False
else:
self.return_values = True
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
|
import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
self.return_values = r.status_code == 200
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
|
Clean logic in Communities preflight
|
Clean logic in Communities preflight
|
Python
|
bsd-3-clause
|
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
|
import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
if r.status_code != 200:
self.return_values = False
else:
self.return_values = True
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
Clean logic in Communities preflight
|
import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
self.return_values = r.status_code == 200
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
|
<commit_before>import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
if r.status_code != 200:
self.return_values = False
else:
self.return_values = True
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
<commit_msg>Clean logic in Communities preflight<commit_after>
|
import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
self.return_values = r.status_code == 200
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
|
import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
if r.status_code != 200:
self.return_values = False
else:
self.return_values = True
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
Clean logic in Communities preflightimport requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
self.return_values = r.status_code == 200
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
|
<commit_before>import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
if r.status_code != 200:
self.return_values = False
else:
self.return_values = True
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
<commit_msg>Clean logic in Communities preflight<commit_after>import requests
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class IsCommunitiesEnabled(BaseSalesforceApiTask):
api_version = "48.0"
def _run_task(self):
s = requests.Session()
s.get(self.org_config.start_url).raise_for_status()
r = s.get(
"{}/sites/servlet.SitePrerequisiteServlet".format(
self.org_config.instance_url
)
)
self.return_values = r.status_code == 200
self.logger.info(
"Completed Communities preflight check with result {}".format(
self.return_values
)
)
|
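The refactor above collapses a four-line `if`/`else` into a single boolean expression; the pattern is general for any status check. An illustrative standalone version, with made-up names and URL:

```python
# Illustrative only: the same simplification outside of CumulusCI.
import requests

def endpoint_is_up(url):
    # Equivalent to: if r.status_code != 200: False, else: True.
    return requests.get(url, timeout=10).status_code == 200

print(endpoint_is_up("https://example.com/"))
```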
d5fb6c5320dbb6827e24dc22be08454f05aac83e
|
emails/tests.py
|
emails/tests.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
def test_regular_users_dont_have_access_to_tester(self):
self.client.login(username='user', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 404)
|
Test that regular users can send test emails
|
Test that regular users can send test emails
|
Python
|
agpl-3.0
|
Turupawn/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website,lutris/website
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
Test that regular users can send test emails
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
def test_regular_users_dont_have_access_to_tester(self):
self.client.login(username='user', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 404)
|
<commit_before>from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
<commit_msg>Test that regular users can send test emails<commit_after>
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
def test_regular_users_dont_have_access_to_tester(self):
self.client.login(username='user', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 404)
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
Test that regular users can send test emailsfrom django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
def test_regular_users_dont_have_access_to_tester(self):
self.client.login(username='user', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 404)
|
<commit_before>from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
<commit_msg>Test that regular users can send test emails<commit_after>from django.test import TestCase
from django.core.urlresolvers import reverse
from common.util import create_admin, create_user
class TestEmailRendering(TestCase):
def setUp(self):
self.user = create_user(username='user', password='password')
self.admin = create_admin(username='admin', password='password')
def test_can_get_an_example_email(self):
response = self.client.get(reverse('example_email'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Example email")
self.assertContains(response, "The email title")
def test_can_load_email_sender_if_admin(self):
self.client.login(username='admin', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 200)
def test_regular_users_dont_have_access_to_tester(self):
self.client.login(username='user', password='password')
response = self.client.get(reverse('email_sender_test'))
self.assertEqual(response.status_code, 404)
|
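The new test asserts a 404 for regular users; on the view side that behaviour typically comes from raising `Http404` for non-staff requests. A hedged sketch of what such a view might look like (this is not the project's actual implementation, and the template name is made up):

```python
# Hypothetical view-side counterpart to the 404 assertion above.
from django.http import Http404
from django.shortcuts import render

def email_sender_test(request):
    if not request.user.is_staff:
        # Hide the tester from regular users rather than returning 403.
        raise Http404
    return render(request, "emails/sender_test.html")
```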
186b1aabdf9574218204d1bc6da12be9b3cfb681
|
example/main.py
|
example/main.py
|
#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def __call__(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)
|
#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def get(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)
|
Use method get in example app
|
Use method get in example app
|
Python
|
mit
|
beni55/nacho,avelino/nacho,beni55/nacho
|
#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def __call__(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)Use method get in example app
|
#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def get(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)
|
<commit_before>#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def __call__(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)<commit_msg>Use method get in example app<commit_after>
|
#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def get(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)
|
#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def __call__(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)Use method get in example app#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def get(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)
|
<commit_before>#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def __call__(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)<commit_msg>Use method get in example app<commit_after>#!/usr/bin/env python3
import logging
import sys
assert sys.version >= '3.3', 'Please use Python 3.3 or higher.'
from nacho.routing import Router
from nacho.http import HttpServer
from nacho.multithreading import Superviser
from nacho.app import Application
class Home(Application):
def get(self, request_args=None):
data = {'a': 1}
self.render('home.html', **data)
def urls():
router = Router()
router.add_handler('/(.*)', Home())
return HttpServer(router, debug=True, keep_alive=75)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
superviser = Superviser()
superviser.start(urls)
|
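Switching from `__call__` to `get` implies the framework dispatches on the HTTP method name. A generic sketch of that dispatch pattern, not nacho's actual router code:

```python
# Generic HTTP-method dispatch, sketched to show why `get` replaces `__call__`.
class Handler:
    def get(self, request_args=None):
        return "GET response"

def dispatch(handler, method, request_args=None):
    # Look up a method named after the HTTP verb on the handler.
    func = getattr(handler, method.lower(), None)
    if func is None:
        raise ValueError("405 Method Not Allowed")
    return func(request_args)

print(dispatch(Handler(), "GET"))  # -> GET response
```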
a5e7423b01ffb4fed1987dfadbe9283480f04929
|
grazer/core/parsing.py
|
grazer/core/parsing.py
|
import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in context]
else:
for node in context:
for out in path(node):
results.append(out)
return parse(key, paths, results)
|
import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
for node in context:
for out in path(node):
results.append(out)
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in results]
else:
return parse(key, paths, results)
|
Fix for zero depth path
|
Fix for zero depth path
|
Python
|
mit
|
CodersOfTheNight/verata
|
import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in context]
else:
for node in context:
for out in path(node):
results.append(out)
return parse(key, paths, results)
Fix for zero depth path
|
import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
for node in context:
for out in path(node):
results.append(out)
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in results]
else:
return parse(key, paths, results)
|
<commit_before>import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in context]
else:
for node in context:
for out in path(node):
results.append(out)
return parse(key, paths, results)
<commit_msg>Fix for zero depth path<commit_after>
|
import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
for node in context:
for out in path(node):
results.append(out)
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in results]
else:
return parse(key, paths, results)
|
import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in context]
else:
for node in context:
for out in path(node):
results.append(out)
return parse(key, paths, results)
Fix for zero depth pathimport re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
for node in context:
for out in path(node):
results.append(out)
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in results]
else:
return parse(key, paths, results)
|
<commit_before>import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in context]
else:
for node in context:
for out in path(node):
results.append(out)
return parse(key, paths, results)
<commit_msg>Fix for zero depth path<commit_after>import re
import logging
logger = logging.getLogger("Parsing")
def create_node(data):
tag_part = r"(?P<tag>\w+)"
attr_part = r"(?P<q>\[(?P<attr>\w+)=(\"|\')(?P<val>.+?)(\"|\')\])?"
selector_part = r"(\{(?P<selector>\d+)\})?"
p = tag_part + attr_part + selector_part
patt = re.compile(p)
m = patt.match(data)
tag = m.group("tag")
if m.group("q"):
q = {m.group("attr"): m.group("val")}
else:
q = None
def selector(lst):
s = m.group("selector")
if s:
sel = int(s)
return [lst[sel]] if sel < len(lst) else []
else:
return lst
def node(root):
return selector(root.findAll(tag, q))
return node
def parse(key, paths, context):
path = paths.pop()
results = []
for node in context:
for out in path(node):
results.append(out)
if len(paths) == 0:
# End of line
return [(key, result.text, result.attrs)
for result in results]
else:
return parse(key, paths, results)
|
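For orientation, a minimal sketch of driving the fixed parser end to end, using create_node and parse as defined above. BeautifulSoup 4 is an assumption here (the record never names the library behind the findAll call), and the HTML plus the "a[href='x']{0}" path expression are made up for illustration:

from bs4 import BeautifulSoup  # assumed backend for the findAll call above

soup = BeautifulSoup("<div><a href='x'>one</a><a href='x'>two</a></div>",
                     "html.parser")

# A single-element path exercises the fixed zero depth case: the matcher is
# now applied to the context before the end-of-line check, so the matched
# tags come back instead of the raw context.
print(parse("link", [create_node("a[href='x']{0}")], [soup]))
# -> [('link', 'one', {'href': 'x'})]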
5bbed41d8150f6d0657f1a7670b449619f3ba0f7
|
promgen/util.py
|
promgen/util.py
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests
from promgen.version import __version__
def post(url, *args, **kwargs):
'''Wraps requests.post with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.post(url, *args, **kwargs)
def get(url, *args, **kwargs):
'''Wraps requests.get with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.get(url, *args, **kwargs)
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, **kwargs)
def get(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, **kwargs)
|
Copy the pattern from requests.api to use a slightly more stable API
|
Copy the pattern from requests.api to use a slightly more stable API
|
Python
|
mit
|
kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests
from promgen.version import __version__
def post(url, *args, **kwargs):
'''Wraps requests.post with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.post(url, *args, **kwargs)
def get(url, *args, **kwargs):
'''Wraps requests.get with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.get(url, *args, **kwargs)
Copy the pattern from requests.api to use a slightly more stable API
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, **kwargs)
def get(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, **kwargs)
|
<commit_before># Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests
from promgen.version import __version__
def post(url, *args, **kwargs):
'''Wraps requests.post with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.post(url, *args, **kwargs)
def get(url, *args, **kwargs):
'''Wraps requests.get with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.get(url, *args, **kwargs)
<commit_msg>Copy the pattern from requests.api to use a slightly more stable API<commit_after>
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, **kwargs)
def get(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, **kwargs)
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests
from promgen.version import __version__
def post(url, *args, **kwargs):
'''Wraps requests.post with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.post(url, *args, **kwargs)
def get(url, *args, **kwargs):
'''Wraps requests.get with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.get(url, *args, **kwargs)
Copy the pattern from requests.api to use a slightly more stable API# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, **kwargs)
def get(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, **kwargs)
|
<commit_before># Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests
from promgen.version import __version__
def post(url, *args, **kwargs):
'''Wraps requests.post with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.post(url, *args, **kwargs)
def get(url, *args, **kwargs):
'''Wraps requests.get with our user-agent'''
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)
return requests.get(url, *args, **kwargs)
<commit_msg>Copy the pattern from requests.api to use a slightly more stable API<commit_after># Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, **kwargs)
def get(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, **kwargs)
|
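A small usage sketch of the rewritten wrappers (the URL and timeout are illustrative). One practical gain of the Session pattern: per-call header dicts are merged with the session defaults by requests itself, instead of the wrapper mutating the caller's kwargs in place:

from promgen import util

# Behaves like requests.get, but the request always carries the promgen
# User-Agent set on the short-lived Session.
resp = util.get("https://prometheus.example.com/api/v1/targets", timeout=5)
print(resp.request.headers["User-Agent"])   # e.g. "promgen/1.2.3"

# Extra headers merge with the session defaults rather than replacing them.
util.post("https://prometheus.example.com/-/reload",
          headers={"X-Requested-By": "promgen"})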
00e9f7d239287896946511b81e2029a5db1f435c
|
scipy/fftpack/__init__.py
|
scipy/fftpack/__init__.py
|
#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Add dct and idct to the scipy.fftpack namespace.
|
Add dct and idct to the scipy.fftpack namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5519 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
Python
|
bsd-3-clause
|
scipy/scipy-svn,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,scipy/scipy-svn,jasonmccampbell/scipy-refactor
|
#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
Add dct and idct to the scipy.fftpack namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5519 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
<commit_msg>Add dct and idct to the scipy.fftpack namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5519 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>
|
#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
Add dct and idct to the scipy.fftpack namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5519 d6536bca-fef9-0310-8506-e4c0a848fbcf#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
<commit_msg>Add dct and idct to the scipy.fftpack namespace.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@5519 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>#
# fftpack - Discrete Fourier Transform algorithms.
#
# Created: Pearu Peterson, August,September 2002
from info import __all__,__doc__
from fftpack_version import fftpack_version as __version__
from basic import *
from pseudo_diffs import *
from helper import *
from numpy.dual import register_func
for k in ['fft', 'ifft', 'fftn', 'ifftn', 'fft2', 'ifft2']:
register_func(k, eval(k))
del k, register_func
from realtransforms import *
__all__.extend(['dct', 'idct'])
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
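A quick round trip through the newly exported pair, written against the modern scipy.fftpack API rather than the Python 2 era imports above; numpy is assumed:

import numpy as np
from scipy.fftpack import dct, idct  # made importable by this change

x = np.array([1.0, 2.0, 3.0, 4.0])
y = dct(x)  # type-2 DCT by default
# Without normalization, idct(dct(x)) returns x scaled by 2 * len(x).
print(np.allclose(idct(y) / (2 * len(x)), x))  # True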
5a9fe4f8100a36fff3e4c4af21a76d18ac27766f
|
headers/cpp/headers.py
|
headers/cpp/headers.py
|
#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
_TRANSITIVE = False
_INCLUDE_DIRS = ['.']
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
|
#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
# Allow a site to override the defaults if they choose.
# Just put a siteheaders.py somewhere in the PYTHONPATH.
try:
import siteheaders
except ImportError:
siteheaders = None
_TRANSITIVE = getattr(siteheaders, 'TRANSITIVE', False)
_INCLUDE_DIRS = getattr(siteheaders, 'INCLUDE_DIRS', ['.'])
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
|
Allow a site to override the default search (include) directories.
|
Allow a site to override the default search (include) directories.
git-svn-id: b0ea89ea3bf41df64b6a046736e217d0ae4a0fba@19 806ff5bb-693f-0410-b502-81bc3482ff28
|
Python
|
apache-2.0
|
myint/cppclean,myint/cppclean,myint/cppclean,myint/cppclean
|
#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
_TRANSITIVE = False
_INCLUDE_DIRS = ['.']
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
Allow a site to override the default search (include) directories.
git-svn-id: b0ea89ea3bf41df64b6a046736e217d0ae4a0fba@19 806ff5bb-693f-0410-b502-81bc3482ff28
|
#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
# Allow a site to override the defaults if they choose.
# Just put a siteheaders.py somewhere in the PYTHONPATH.
try:
import siteheaders
except ImportError:
siteheaders = None
_TRANSITIVE = getattr(siteheaders, 'TRANSITIVE', False)
_INCLUDE_DIRS = getattr(siteheaders, 'INCLUDE_DIRS', ['.'])
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
|
<commit_before>#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
_TRANSITIVE = False
_INCLUDE_DIRS = ['.']
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
<commit_msg>Allow a site to override the default search (include) directories.
git-svn-id: b0ea89ea3bf41df64b6a046736e217d0ae4a0fba@19 806ff5bb-693f-0410-b502-81bc3482ff28<commit_after>
|
#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
# Allow a site to override the defaults if they choose.
# Just put a siteheaders.py somewhere in the PYTHONPATH.
try:
import siteheaders
except ImportError:
siteheaders = None
_TRANSITIVE = getattr(siteheaders, 'TRANSITIVE', False)
_INCLUDE_DIRS = getattr(siteheaders, 'INCLUDE_DIRS', ['.'])
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
|
#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
_TRANSITIVE = False
_INCLUDE_DIRS = ['.']
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
Allow a site to override the default search (include) directories.
git-svn-id: b0ea89ea3bf41df64b6a046736e217d0ae4a0fba@19 806ff5bb-693f-0410-b502-81bc3482ff28#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
# Allow a site to override the defaults if they choose.
# Just put a siteheaders.py somewhere in the PYTHONPATH.
try:
import siteheaders
except ImportError:
siteheaders = None
_TRANSITIVE = getattr(siteheaders, 'TRANSITIVE', False)
_INCLUDE_DIRS = getattr(siteheaders, 'INCLUDE_DIRS', ['.'])
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
|
<commit_before>#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
_TRANSITIVE = False
_INCLUDE_DIRS = ['.']
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
<commit_msg>Allow a site to override the default search (include) directories.
git-svn-id: b0ea89ea3bf41df64b6a046736e217d0ae4a0fba@19 806ff5bb-693f-0410-b502-81bc3482ff28<commit_after>#!/usr/bin/env python
"""Find and print the headers #include'd in a source file."""
import os
import sys
from cpp import ast
from cpp import utils
# Allow a site to override the defaults if they choose.
# Just put a siteheaders.py somewhere in the PYTHONPATH.
try:
import siteheaders
except ImportError:
siteheaders = None
_TRANSITIVE = getattr(siteheaders, 'TRANSITIVE', False)
_INCLUDE_DIRS = getattr(siteheaders, 'INCLUDE_DIRS', ['.'])
def ReadSource(relative_filename):
source = None
for path in _INCLUDE_DIRS:
filename = os.path.join(path, relative_filename)
source = utils.ReadFile(filename, False)
if source:
return source, filename
return None, relative_filename
def GetHeaders(filename):
source, actual_filename = ReadSource(filename)
if source is None:
print 'Unable to find', filename
return []
included_files = []
print 'Processing', actual_filename
builder = ast.BuilderFromSource(source)
for node in builder.Generate():
if isinstance(node, ast.Include):
if not node.system:
print node.filename
included_files.append(node.filename)
# Transitively process all the files that were included.
if _TRANSITIVE:
for filename in included_files:
included_files.extend(GetHeaders(filename))
return included_files
def main(argv):
for filename in argv[1:]:
GetHeaders(filename)
if __name__ == '__main__':
main(sys.argv)
|
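The override hook is duck-typed: any importable module named siteheaders that defines either attribute wins, and getattr() falls back to the old defaults otherwise. A hypothetical example (the directory names are illustrative):

# siteheaders.py -- drop anywhere on PYTHONPATH
TRANSITIVE = True
INCLUDE_DIRS = ['.', 'include', 'third_party/include']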
85f8d0662901047115f2d852489a3a5be1a01226
|
datafilters/views.py
|
datafilters/views.py
|
try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
Set base class for view mixin to MultipleObjectMixin
|
Set base class for view mixin to MultipleObjectMixin
|
Python
|
mit
|
freevoid/django-datafilters,zorainc/django-datafilters,zorainc/django-datafilters
|
try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
Set base class for view mixin to MultipleObjectMixin
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
<commit_before>try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
<commit_msg>Set base class for view mixin to MultipleObjectMixin<commit_after>
|
from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
Set base class for view mixin to MultipleObjectMixinfrom django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
<commit_before>try:
from django.views.generic.base import ContextMixin as mixin_base
except ImportError:
mixin_base = object
__all__ = ('FilterFormMixin',)
class FilterFormMixin(mixin_base):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
<commit_msg>Set base class for view mixin to MultipleObjectMixin<commit_after>from django.views.generic.list import MultipleObjectMixin
__all__ = ('FilterFormMixin',)
class FilterFormMixin(MultipleObjectMixin):
"""
Mixin that adds filtering behaviour for Class Based Views.
Changed in a way that plays nicely with other CBVs simply by overriding the get_queryset(self) and
get_context_data(self, **kwargs) methods.
"""
filter_form_cls = None
use_filter_chaining = False
def get_filter(self):
return self.filter_form_cls(self.request.GET,
runtime_context=self.get_runtime_context(),
use_filter_chaining=self.use_filter_chaining)
def get_queryset(self):
qs = super(FilterFormMixin, self).get_queryset()
qs = self.get_filter().filter(qs).distinct()
return qs
def get_context_data(self, **kwargs):
context = super(FilterFormMixin, self).get_context_data(**kwargs)
context['filterform'] = self.get_filter()
return context
def get_runtime_context(self):
return {'user': self.request.user}
|
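A sketch of combining the mixin with a stock list view; Book and BookFilterForm are hypothetical names, not part of the package. Listing FilterFormMixin first keeps its get_queryset() ahead of ListView's in the MRO, which is also why basing it on MultipleObjectMixin (already in ListView's ancestry) keeps the super() chain cooperative:

from django.views.generic import ListView
from datafilters.views import FilterFormMixin

class BookListView(FilterFormMixin, ListView):  # mixin first in the MRO
    model = Book                      # hypothetical model
    filter_form_cls = BookFilterForm  # hypothetical filter form class
    use_filter_chaining = True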
9a43f573f2072051c64fc6da432aaad5d31e0023
|
PyMarkdownGen/test/block_code_test.py
|
PyMarkdownGen/test/block_code_test.py
|
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
"""This module contains the unit tests for
the formatting of block quotes.
"""
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
Add docstrings for tests of block quotes
|
Add docstrings for tests of block quotes
|
Python
|
epl-1.0
|
LukasWoodtli/PyMarkdownGen
|
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
Add docstrings for tests of block quotes
|
"""This module contains the unit tests for
the formatting of block quotes.
"""
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
<commit_before>
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add docstrings for tests of block quotes<commit_after>
|
"""This module contains the unit tests for
the formatting of block quotes.
"""
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
Add docstrings for tests of block quotes"""This module contains the unit tests for
the formatting of block quotes.
"""
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
<commit_before>
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add docstrings for tests of block quotes<commit_after>"""This module contains the unit tests for
the formatting of block quotes.
"""
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
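The behaviour under test, condensed; this is inferred from the expected strings above, with the second positional argument read as the "simple" switch:

import PyMarkdownGen.PyMarkdownGen as md

print(md.gen_block_quote("one\ntwo"))        # "> one\n> two\n"
print(md.gen_block_quote("one\ntwo", True))  # "> one\ntwo\n"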
9330bcf1ae0ccde630e34a4eec8120dbdd44bcc4
|
utils/lit/tests/shared-output.py
|
utils/lit/tests/shared-output.py
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%S", "Inputs", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
Fix new test harder for systems that don't use / as os.path.sep
|
lit.py: Fix new test harder for systems that don't use / as os.path.sep
I didn't think about '%{inputs}' having the same problem. This one
should be a fully Windows path name.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315779 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
lit.py: Fix new test harder for systems that don't use / as os.path.sep
I didn't think about '%{inputs}' having the same problem. This one
should be a fully Windows path name.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315779 91177308-0d34-0410-b5e6-96231b3b80d8
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%S", "Inputs", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
<commit_before># RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
<commit_msg>lit.py: Fix new test harder for systems that don't use / as os.path.sep
I didn't think about '%{inputs}' having the same problem. This one
should be a fully Windows path name.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315779 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%S", "Inputs", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
lit.py: Fix new test harder for systems that don't use / as os.path.sep
I didn't think about '%{inputs}' having the same problem. This one
should be a fully Windows path name.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315779 91177308-0d34-0410-b5e6-96231b3b80d8# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%S", "Inputs", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
<commit_before># RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
<commit_msg>lit.py: Fix new test harder for systems that don't use / as os.path.sep
I didn't think about '%{inputs}' having the same problem. This one
should be a fully Windows path name.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315779 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after># RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%S", "Inputs", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
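The point of spelling the path out component by component, shown in two lines of Python; the Windows output is the case the fix guards against:

import os

# Prints %S/Inputs/shared-output/lit.cfg on POSIX but
# %S\Inputs\shared-output\lit.cfg on Windows -- always matching the host
# os.path.sep, which a substitution expanded with a hard-coded '/' would not.
print(os.path.join("%S", "Inputs", "shared-output", "lit.cfg"))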
4548cf2a5ec69f75c19169769fffe88ea3d061e1
|
djlint/analyzers/context.py
|
djlint/analyzers/context.py
|
"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
|
"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def has_value(self, value):
dict_ = {}
for d in self.dicts:
dict_.update(d)
return value in dict_.values()
def __contains__(self, key):
return self.has_key(key)
|
Add has_value method to Context class
|
Add has_value method to Context class
|
Python
|
isc
|
alfredhq/djlint
|
"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
Add has_value method to Context class
|
"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def has_value(self, value):
dict_ = {}
for d in self.dicts:
dict_.update(d)
return value in dict_.values()
def __contains__(self, key):
return self.has_key(key)
|
<commit_before>"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
<commit_msg>Add has_value method to Context class<commit_after>
|
"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def has_value(self, value):
dict_ = {}
for d in self.dicts:
dict_.update(d)
return value in dict_.values()
def __contains__(self, key):
return self.has_key(key)
|
"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
Add has_value method to Context class"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def has_value(self, value):
dict_ = {}
for d in self.dicts:
dict_.update(d)
return value in dict_.values()
def __contains__(self, key):
return self.has_key(key)
|
<commit_before>"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def __contains__(self, key):
return self.has_key(key)
<commit_msg>Add has_value method to Context class<commit_after>"""Inspired by django.template.Context"""
class ContextPopException(Exception):
"""pop() has been called more times than push()"""
class Context(object):
"""A stack container for imports and assignments."""
def __init__(self):
self.dicts = [{}]
def push(self):
d = {}
self.dicts.append(d)
return d
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
self.dicts[-1][key] = value
def __getitem__(self, key):
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError
def __delitem__(self, key):
del self.dicts[-1][key]
def has_key(self, key):
for d in self.dicts:
if key in d:
return True
return False
def has_value(self, value):
dict_ = {}
for d in self.dicts:
dict_.update(d)
return value in dict_.values()
def __contains__(self, key):
return self.has_key(key)
|
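A side note on the record above: Context implements a layered-scope lookup over a stack of dicts, and the new has_value merges the layers so that only the innermost binding per key is visible. The standard library's collections.ChainMap has the same semantics; the following sketch is illustrative only and is not part of the recorded commit.
from collections import ChainMap
scopes = ChainMap({'x': 1})            # bottom scope
scopes = scopes.new_child({'x': 2})    # push(); the inner layer shadows 'x'
assert scopes['x'] == 2
assert 2 in scopes.values()            # analogue of Context.has_value(2)
assert 1 not in scopes.values()        # shadowed bindings are not reported
scopes = scopes.parents                # pop()
assert scopes['x'] == 1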
7fbb06288af388f30d962da9dcee97d2c49dea82
|
serenata_toolbox/chamber_of_deputies/reimbursements.py
|
serenata_toolbox/chamber_of_deputies/reimbursements.py
|
import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, 'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
|
import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, f'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
|
Use f-string when calling method inside string
|
Use f-string when calling method inside string
|
Python
|
mit
|
datasciencebr/serenata-toolbox
|
import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, 'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
Use f-string when calling method inside string
|
import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, f'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
|
<commit_before>import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, 'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
<commit_msg>Use f-string when calling method inside string<commit_after>
|
import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, f'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
|
import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, 'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
Use f-string when calling method inside stringimport os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, f'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
|
<commit_before>import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, 'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
<commit_msg>Use f-string when calling method inside string<commit_after>import os.path
from tempfile import gettempdir
from urllib.request import urlretrieve
from zipfile import ZipFile
from .reimbursements_cleaner import ReimbursementsCleaner
URL = 'https://www.camara.leg.br/cotas/Ano-{}.csv.zip'
def extract_zip(zip_path, destination_path):
zip_file = ZipFile(zip_path, 'r')
zip_file.extractall(destination_path)
zip_file.close()
class Reimbursements:
"""
Get an updated version of the reimbursements dataset for a given year.
"""
def __init__(self, year, path=None):
self.year = year
self.path = path or gettempdir()
def __call__(self):
self.fetch()
self.clean()
file_path = os.path.join(self.path, f'reimbursements-{self.year}.csv')
return file_path
def fetch(self):
file_path = os.path.join(self.path, f'Ano-{self.year}.zip')
urlretrieve(URL.format(self.year), file_path)
extract_zip(file_path, self.path)
def clean(self):
ReimbursementsCleaner(self.year, self.path)()
|
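The bug fixed in the record above is easy to reproduce: without the f prefix, '{self.year}' is a literal substring rather than an interpolation. A minimal, self-contained illustration (the Demo class is made up for this sketch):
class Demo:
    year = 2017
    def plain(self):
        return 'Ano-{self.year}.zip'   # literal braces: no interpolation
    def fstring(self):
        return f'Ano-{self.year}.zip'  # evaluated at runtime (Python 3.6+)
d = Demo()
assert d.plain() == 'Ano-{self.year}.zip'
assert d.fstring() == 'Ano-2017.zip'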
cc3188e6870e378951efc3785f570496d1807813
|
aistreams/python/pip_package/setup.py
|
aistreams/python/pip_package/setup.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.1"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.2"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
|
Update minor version to 0.0.2.
|
Update minor version to 0.0.2.
Change-Id: I48cd0789ec87d0edd20e9a2980a61b54f8c6b7a6
|
Python
|
apache-2.0
|
google/aistreams,google/aistreams,google/aistreams
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.1"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
Update minor version to 0.0.2.
Change-Id: I48cd0789ec87d0edd20e9a2980a61b54f8c6b7a6
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.2"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
|
<commit_before># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.1"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
<commit_msg>Update minor version to 0.0.2.
Change-Id: I48cd0789ec87d0edd20e9a2980a61b54f8c6b7a6<commit_after>
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.2"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.1"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
Update minor version to 0.0.2.
Change-Id: I48cd0789ec87d0edd20e9a2980a61b54f8c6b7a6# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.2"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
|
<commit_before># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.1"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
<commit_msg>Update minor version to 0.0.2.
Change-Id: I48cd0789ec87d0edd20e9a2980a61b54f8c6b7a6<commit_after># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AI Streams Python SDK."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import setuptools
VERSION = "0.0.2"
REQUIRED_PACKAGES = []
with open("requirements.txt", "r") as f:
REQUIRED_PACKAGES = f.read().splitlines()
CONSOLE_SCRIPTS = ["aisctl = aistreams.python.cli.aisctl:main"]
setuptools.setup(
name="aistreams",
version=VERSION,
author="Google Inc.",
author_email="",
description="AI Streams Python SDK",
long_description="AI Streams Python SDK",
long_description_content_type="text/markdown",
url="",
packages=setuptools.find_packages(),
install_requires=REQUIRED_PACKAGES,
entry_points={"console_scripts": CONSOLE_SCRIPTS},
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
],
python_requires=">=3.6",
)
|
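One hedged aside on the setup.py in this record: f.read().splitlines() forwards blank lines and comments from requirements.txt straight into install_requires. A defensive variant, assuming the conventional requirements.txt format (illustrative, not part of the commit):
def read_requirements(path='requirements.txt'):
    # Drop blank lines and '#' comments before handing the list to setuptools.
    with open(path) as f:
        stripped = (line.strip() for line in f)
        return [line for line in stripped if line and not line.startswith('#')]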
2001f6460e31f3657c3ed2dc7e118452362ab847
|
month/widgets.py
|
month/widgets.py
|
"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
|
"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
if isinstance(value, basestring):
m = int(value[5:7])
y = int(value[:4])
return [ m, y ]
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
|
Handle case where decompress receives string
|
Handle case where decompress receives string
|
Python
|
bsd-3-clause
|
mpachas/django-monthfield,clearspark/django-monthfield
|
"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
Handle case where decompress receives string
|
"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
if isinstance(value, basestring):
m = int(value[5:7])
y = int(value[:4])
return [ m, y ]
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
|
<commit_before>"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
<commit_msg>Handle case where decompress receives string<commit_after>
|
"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
if isinstance(value, basestring):
m = int(value[5:7])
y = int(value[:4])
return [ m, y ]
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
|
"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
Handle case where decompress receives string"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
if isinstance(value, basestring):
m = int(value[5:7])
y = int(value[:4])
return [ m, y ]
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
|
<commit_before>"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
<commit_msg>Handle case where decompress receives string<commit_after>"""
Select widget for MonthField. Copied and modified from
https://docs.djangoproject.com/en/1.8/ref/forms/widgets/#base-widget-classes
"""
from datetime import date
from django.forms import widgets
class MonthSelectorWidget(widgets.MultiWidget):
def __init__(self, attrs=None):
# create choices for days, months, years
# example below, the rest snipped for brevity.
years = [(year, year) for year in range(2000, 2020)]
months = [ (month, month) for month in range(1, 13)]
_widgets = (
widgets.Select(attrs=attrs, choices=months),
widgets.Select(attrs=attrs, choices=years),
)
super(MonthSelectorWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if value:
if isinstance(value, basestring):
m = int(value[5:7])
y = int(value[:4])
return [ m, y ]
return [value.month, value.year]
return [ None, None]
def format_output(self, rendered_widgets):
return ''.join(rendered_widgets)
def value_from_datadict(self, data, files, name):
datelist = [
widget.value_from_datadict(data, files, name + '_%s' % i)
for i, widget in enumerate(self.widgets)]
try:
D = date(day=1, month=int(datelist[0]),
year=int(datelist[1]))
except ValueError:
return ''
else:
return str(D)
|
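The fix recorded above relies on basestring, which exists only in Python 2. A version-portable sketch of the same check, assuming the value keeps the 'YYYY-MM-...' string layout handled by decompress (names here are illustrative):
try:
    string_types = basestring  # Python 2
except NameError:
    string_types = str         # Python 3
def split_month(value):
    # Mirrors decompress(): accept either a string or a date-like object.
    if isinstance(value, string_types):
        return [int(value[5:7]), int(value[:4])]
    return [value.month, value.year]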
9b774ae7e5725ffbf3f8f0780b67d1f7e5bff98d
|
ircelsos/sos.py
|
ircelsos/sos.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
return SensorObservationService(BASE_URL)
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
import sys
import os
import datetime
from xml.etree import ElementTree
import requests
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
data_dir = ircelsos_data_dir()
if not os.path.exists(data_dir):
os.makedirs(data_dir)
xml_file = os.path.join(data_dir, 'capabilities.xml')
if os.path.isfile(xml_file):
xml = file(xml_file).read()
adapted = datetime.datetime.fromtimestamp(os.path.getmtime(xml_file))
outdated = (datetime.datetime.now() - adapted) > datetime.timedelta(1)
else:
xml = None
outdated = True
if not outdated:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
try:
sos = SensorObservationService(BASE_URL)
except requests.ConnectionError:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
with open(xml_file, 'w') as xml:
xml.write(ElementTree.tostring(sos._capabilities))
return sos
def ircelsos_data_dir():
"""Get the data directory
Adapted from jupyter_core
"""
home = os.path.expanduser('~')
if sys.platform == 'darwin':
return os.path.join(home, 'Library', 'ircelsos')
elif os.name == 'nt':
appdata = os.environ.get('APPDATA', os.path.join(home, '.local', 'share'))
return os.path.join(appdata, 'ircelsos')
else:
# Linux, non-OS X Unix, AIX, etc.
xdg = os.environ.get("XDG_DATA_HOME", os.path.join(home, '.local', 'share'))
return os.path.join(xdg, 'ircelsos')
|
Enable offline import + save capabilities xml for reuse
|
Enable offline import + save capabilities xml for reuse
|
Python
|
bsd-2-clause
|
jorisvandenbossche/ircelsos
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
return SensorObservationService(BASE_URL)
Enable offline import + save capabilities xml for reuse
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
import sys
import os
import datetime
from xml.etree import ElementTree
import requests
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
data_dir = ircelsos_data_dir()
if not os.path.exists(data_dir):
os.makedirs(data_dir)
xml_file = os.path.join(data_dir, 'capabilities.xml')
if os.path.isfile(xml_file):
xml = file(xml_file).read()
adapted = datetime.datetime.fromtimestamp(os.path.getmtime(xml_file))
outdated = (datetime.datetime.now() - adapted) > datetime.timedelta(1)
else:
xml = None
outdated = True
if not outdated:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
try:
sos = SensorObservationService(BASE_URL)
except requests.ConnectionError:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
with open(xml_file, 'w') as xml:
xml.write(ElementTree.tostring(sos._capabilities))
return sos
def ircelsos_data_dir():
"""Get the data directory
Adapted from jupyter_core
"""
home = os.path.expanduser('~')
if sys.platform == 'darwin':
return os.path.join(home, 'Library', 'ircelsos')
elif os.name == 'nt':
appdata = os.environ.get('APPDATA', os.path.join(home, '.local', 'share'))
return os.path.join(appdata, 'ircelsos')
else:
# Linux, non-OS X Unix, AIX, etc.
xdg = os.environ.get("XDG_DATA_HOME", os.path.join(home, '.local', 'share'))
return os.path.join(xdg, 'ircelsos')
|
<commit_before># -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
return SensorObservationService(BASE_URL)
<commit_msg>Enable offline import + save capabilities xml for reuse<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
import sys
import os
import datetime
from xml.etree import ElementTree
import requests
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
data_dir = ircelsos_data_dir()
if not os.path.exists(data_dir):
os.makedirs(data_dir)
xml_file = os.path.join(data_dir, 'capabilities.xml')
if os.path.isfile(xml_file):
xml = file(xml_file).read()
adapted = datetime.datetime.fromtimestamp(os.path.getmtime(xml_file))
outdated = (datetime.datetime.now() - adapted) > datetime.timedelta(1)
else:
xml = None
outdated = True
if not outdated:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
try:
sos = SensorObservationService(BASE_URL)
except requests.ConnectionError:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
with open(xml_file, 'w') as xml:
xml.write(ElementTree.tostring(sos._capabilities))
return sos
def ircelsos_data_dir():
"""Get the data directory
Adapted from jupyter_core
"""
home = os.path.expanduser('~')
if sys.platform == 'darwin':
return os.path.join(home, 'Library', 'ircelsos')
elif os.name == 'nt':
appdata = os.environ.get('APPDATA', os.path.join(home, '.local', 'share'))
return os.path.join(appdata, 'ircelsos')
else:
# Linux, non-OS X Unix, AIX, etc.
xdg = os.environ.get("XDG_DATA_HOME", os.path.join(home, '.local', 'share'))
return os.path.join(xdg, 'ircelsos')
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
return SensorObservationService(BASE_URL)
Enable offline import + save capabilities xml for reuse# -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
import sys
import os
import datetime
from xml.etree import ElementTree
import requests
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
data_dir = ircelsos_data_dir()
if not os.path.exists(data_dir):
os.makedirs(data_dir)
xml_file = os.path.join(data_dir, 'capabilities.xml')
if os.path.isfile(xml_file):
xml = file(xml_file).read()
adapted = datetime.datetime.fromtimestamp(os.path.getmtime(xml_file))
outdated = (datetime.datetime.now() - adapted) > datetime.timedelta(1)
else:
xml = None
outdated = True
if not outdated:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
try:
sos = SensorObservationService(BASE_URL)
except requests.ConnectionError:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
with open(xml_file, 'w') as xml:
xml.write(ElementTree.tostring(sos._capabilities))
return sos
def ircelsos_data_dir():
"""Get the data directory
Adapted from jupyter_core
"""
home = os.path.expanduser('~')
if sys.platform == 'darwin':
return os.path.join(home, 'Library', 'ircelsos')
elif os.name == 'nt':
appdata = os.environ.get('APPDATA', os.path.join(home, '.local', 'share'))
return os.path.join(appdata, 'ircelsos')
else:
# Linux, non-OS X Unix, AIX, etc.
xdg = os.environ.get("XDG_DATA_HOME", os.path.join(home, '.local', 'share'))
return os.path.join(xdg, 'ircelsos')
|
<commit_before># -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
return SensorObservationService(BASE_URL)
<commit_msg>Enable offline import + save capabilities xml for reuse<commit_after># -*- coding: utf-8 -*-
"""
Created on Sun Jul 12 23:16:17 2015
@author: Joris Van den Bossche
"""
from __future__ import print_function
import sys
import os
import datetime
from xml.etree import ElementTree
import requests
from owslib.sos import SensorObservationService
BASE_URL = 'http://sos.irceline.be/sos'
def get_sos():
"""Return a SensorObservationService instance"""
data_dir = ircelsos_data_dir()
if not os.path.exists(data_dir):
os.makedirs(data_dir)
xml_file = os.path.join(data_dir, 'capabilities.xml')
if os.path.isfile(xml_file):
xml = file(xml_file).read()
adapted = datetime.datetime.fromtimestamp(os.path.getmtime(xml_file))
outdated = (datetime.datetime.now() - adapted) > datetime.timedelta(1)
else:
xml = None
outdated = True
if not outdated:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
try:
sos = SensorObservationService(BASE_URL)
except requests.ConnectionError:
sos = SensorObservationService(BASE_URL, xml=xml)
else:
with open(xml_file, 'w') as xml:
xml.write(ElementTree.tostring(sos._capabilities))
return sos
def ircelsos_data_dir():
"""Get the data directory
Adapted from jupyter_core
"""
home = os.path.expanduser('~')
if sys.platform == 'darwin':
return os.path.join(home, 'Library', 'ircelsos')
elif os.name == 'nt':
appdata = os.environ.get('APPDATA', os.path.join(home, '.local', 'share'))
return os.path.join(appdata, 'ircelsos')
else:
# Linux, non-OS X Unix, AIX, etc.
xdg = os.environ.get("XDG_DATA_HOME", os.path.join(home, '.local', 'share'))
return os.path.join(xdg, 'ircelsos')
|
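A note on the caching logic introduced in this record: file(xml_file).read() uses the Python 2-only file() builtin, and the one-day freshness test can be factored out. A standalone sketch of the same cache-or-refresh pattern using open() (the function name is illustrative):
import datetime
import os
def read_if_fresh(path, max_age=datetime.timedelta(days=1)):
    # Return the cached text if the file exists and is recent enough, else None.
    if not os.path.isfile(path):
        return None
    mtime = datetime.datetime.fromtimestamp(os.path.getmtime(path))
    if datetime.datetime.now() - mtime > max_age:
        return None
    with open(path) as f:
        return f.read()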
1d66cbb31ba6da6c72290352c234680caba89594
|
rpy2_helpers.py
|
rpy2_helpers.py
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
"""Call lattice's `xyplot` with the given `formula` and `data`.
You can supply `formula` as a string or rpy2 `Formula`.
You can supply `data` as a dict or rpy2 `DataFrame`.
"""
if not isinstance(data, DataFrame):
data = DataFrame(data)
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
Document the xyplot function and support data dict
|
Document the xyplot function and support data dict
|
Python
|
mit
|
ecashin/rpy2_helpers
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
Document the xyplot function and support data dict
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
"""Call lattice's `xyplot` with the given `formula` and `data`.
You can supply `formula` as a string or rpy2 `Formula`.
You can supply `data` as a dict or rpy2 `DataFrame`.
"""
if not isinstance(data, DataFrame):
data = DataFrame(data)
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
<commit_before>#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
<commit_msg>Document the xyplot function and support data dict<commit_after>
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
"""Call lattice's `xyplot` with the given `formula` and `data`.
You can supply `formula` as a string or rpy2 `Formula`.
You can supply `data` as a dict or rpy2 `DataFrame`.
"""
if not isinstance(data, DataFrame):
data = DataFrame(data)
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
Document the xyplot function and support data dict#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
"""Call lattice's `xyplot` with the given `formula` and `data`.
You can supply `formula` as a string or rpy2 `Formula`.
You can supply `data` as a dict or rpy2 `DataFrame`.
"""
if not isinstance(data, DataFrame):
data = DataFrame(data)
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
<commit_before>#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
<commit_msg>Document the xyplot function and support data dict<commit_after>#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
"""Call lattice's `xyplot` with the given `formula` and `data`.
You can supply `formula` as a string or rpy2 `Formula`.
You can supply `data` as a dict or rpy2 `DataFrame`.
"""
if not isinstance(data, DataFrame):
data = DataFrame(data)
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
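The record above extends xyplot to accept a plain dict for data. A minimal sketch of exercising that path, assuming the helper is saved as a module named rpy2_helpers (a hypothetical name) and that rpy2, R's lattice package and numpy are installed:
#!/usr/bin/env python2.7
# Sketch only: drives the dict-accepting xyplot from the record above.
# The module name rpy2_helpers is an assumption; point it at wherever
# the helper actually lives.
import numpy as np
from rpy2.robjects import numpy2ri
from rpy2_helpers import xyplot
numpy2ri.activate()                  # let numpy arrays cross into R transparently
x = np.arange(100)
y = x ** 2
xyplot('y ~ x', {'x': x, 'y': y})    # plain dict: wrapped in a DataFrame internally
raw_input('Hit enter to exit.')      # keep the R graphics device open (Python 2)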
cac5d03fc56bf0d4fd6f2daba7942d5d379e344b
|
content/util/webassets_integration.py
|
content/util/webassets_integration.py
|
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
return self.env[bundle_name].urls()[0]
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
urls = self.env[bundle_name].urls()
return "/{}".format(urls[0]) # /{} to make url absolute
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.
|
Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.
|
Python
|
apache-2.0
|
daboross/dabo.guru,daboross/dabo.guru,daboross/dabo.guru,daboross/dabo.guru
|
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
return self.env[bundle_name].urls()[0]
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.
|
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
urls = self.env[bundle_name].urls()
return "/{}".format(urls[0]) # /{} to make url absolute
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
<commit_before>class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
return self.env[bundle_name].urls()[0]
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
<commit_msg>Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.<commit_after>
|
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
urls = self.env[bundle_name].urls()
return "/{}".format(urls[0]) # /{} to make url absolute
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
return self.env[bundle_name].urls()[0]
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.
class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
urls = self.env[bundle_name].urls()
return "/{}".format(urls[0]) # /{} to make url absolute
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
<commit_before>class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
return self.env[bundle_name].urls()[0]
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
<commit_msg>Prepend '/' to urls gotten from asset_url() to make them absolute. This fixes /projects/* documentation not displaying correctly.<commit_after>class Integration:
def __init__(self, env):
"""
:type env: webassets.Environment
"""
self.env = env
def asset_url(self, bundle_name):
"""
:type bundle_name: str
"""
urls = self.env[bundle_name].urls()
return "/{}".format(urls[0]) # /{} to make url absolute
def register(self, app):
app.jinja_env.globals.update(asset_url=self.asset_url)
|
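The leading '/' matters because browsers resolve a path-relative URL against the current page's directory, so nested pages like /projects/* end up pointing at the wrong location. A quick stdlib illustration with made-up paths:
# Why 'static/app.css' breaks under /projects/* while '/static/app.css'
# does not; URLs here are invented for illustration.
from urllib.parse import urljoin
page = 'http://example.com/projects/foo/'
print(urljoin(page, 'static/app.css'))   # http://example.com/projects/foo/static/app.css (wrong file)
print(urljoin(page, '/static/app.css'))  # http://example.com/static/app.css (root-relative, correct)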
6ad8c9fc7846f51e2f784d38f9a92017552da996
|
lanes/models.py
|
lanes/models.py
|
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
|
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class Period(validators.Period):
validator = models.ForeignKey(LaneWithinPeriodValidator, related_name='periods')
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
class LaneMaxReservationsPerUserValidator(validators.MaxReservationsPerUserValidator):
pass
|
Add new validators to SPA
|
Add new validators to SPA
|
Python
|
mit
|
mbad/kitabu,mbad/kitabu,mbad/kitabu
|
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
Add new validators to SPA
|
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class Period(validators.Period):
validator = models.ForeignKey(LaneWithinPeriodValidator, related_name='periods')
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
class LaneMaxReservationsPerUserValidator(validators.MaxReservationsPerUserValidator):
pass
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
<commit_msg>Add new validators to SPA<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class Period(validators.Period):
validator = models.ForeignKey(LaneWithinPeriodValidator, related_name='periods')
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
class LaneMaxReservationsPerUserValidator(validators.MaxReservationsPerUserValidator):
pass
|
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
Add new validators to SPA
from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class Period(validators.Period):
validator = models.ForeignKey(LaneWithinPeriodValidator, related_name='periods')
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
class LaneMaxReservationsPerUserValidator(validators.MaxReservationsPerUserValidator):
pass
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
<commit_msg>Add new validators to SPA<commit_after>from django.db import models
from django.contrib.auth.models import User
from kitabu.models.subjects import VariableSizeSubject, BaseSubject
from kitabu.models.reservations import ReservationMaybeExclusive, ReservationGroup, BaseReservation
from kitabu.models import validators
from pools.models import Pool
class Lane(VariableSizeSubject, BaseSubject):
name = models.TextField()
cluster = models.ForeignKey(Pool, related_name='subjects')
def __unicode__(self):
return self.name
class LaneReservation(ReservationMaybeExclusive, BaseReservation):
subject = models.ForeignKey('Lane', related_name='reservations')
group = models.ForeignKey('LaneReservationGroup', related_name='reservations', null=True, blank=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return "%s from %s to %s (%s places)" % (self.subject.name, self.start, self.end, self.size)
class LaneReservationGroup(ReservationGroup):
pass
class LaneFullTimeValidator(validators.FullTimeValidator):
pass
class LaneTimeIntervalValidator(validators.TimeIntervalValidator):
pass
class LaneWithinPeriodValidator(validators.WithinPeriodValidator):
pass
class Period(validators.Period):
validator = models.ForeignKey(LaneWithinPeriodValidator, related_name='periods')
class LaneNotWithinPeriodValidator(validators.NotWithinPeriodValidator):
pass
class LaneGivenHoursAndWeekdaysValidator(validators.GivenHoursAndWeekdaysValidator):
pass
class LaneMaxReservationsPerUserValidator(validators.MaxReservationsPerUserValidator):
pass
|
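The record follows kitabu's pattern: generic abstract validators in the library, thin per-app concrete subclasses, and multi-row configuration (Period) holding a ForeignKey back to its validator. A rough plain-Python analogue of that shape, with invented class names and no Django dependency:
# Plain-Python analogue, for illustration only: a generic validator, a
# per-app concrete subclass, and period rows that back-reference it.
class WithinPeriodValidator(object):
    def __init__(self):
        self.periods = []                  # stands in for the reverse FK 'periods'
    def validate(self, start, end):
        return any(p.start <= start and end <= p.end for p in self.periods)
class Period(object):
    def __init__(self, validator, start, end):
        self.start, self.end = start, end
        self.validator = validator         # stands in for the ForeignKey
        validator.periods.append(self)
class LaneWithinPeriodValidator(WithinPeriodValidator):
    pass                                   # app-level concrete subclass
v = LaneWithinPeriodValidator()
Period(v, 9, 17)
print(v.validate(10, 12))   # True: inside the 9-17 period
print(v.validate(8, 12))    # False: starts before the period opens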
d6c4a38e172894a2240a658fe73ea9816e89cd03
|
deduplicated/web/__init__.py
|
deduplicated/web/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
Add web function for add new directory
|
Add web function for add new directory
|
Python
|
mit
|
eduardoklosowski/deduplicated,eduardoklosowski/deduplicated
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
Add web function for add new directory
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
<commit_msg>Add web function for add new directory<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
Add web function for add new directory
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, render_template
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
<commit_msg>Add web function for add new directory<commit_after># -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Eduardo Klosowski
# License: MIT (see LICENSE for details)
#
from flask import Flask, redirect, render_template, request
import jinja2
from .. import Directory, directory_list, str_size
# Init app
jinja2.filters.FILTERS['str_size'] = str_size
app = Flask(__name__)
# Pages
@app.route('/')
def dirlist():
directories = [(d[0], Directory(d[1])) for d in directory_list()]
directories.sort(key=lambda d: str(d[1]).lower())
return render_template('dirlist.html', directories=directories)
@app.route('/dir/add', methods=['post'])
def diradd():
dirname = request.form.get('directory', '')
if dirname:
Directory(dirname)
return redirect('/')
# Run
def main():
import sys
from gunicorn.app.wsgiapp import run
sys.argv = ['gunicorn',
'--access-logfile=-',
'--error-logfile=-',
'-b', '127.0.0.1:5050',
'deduplicated.web:app']
run()
|
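Routes like the new /dir/add are easy to exercise without a browser via Flask's built-in test client. A self-contained sketch with a simplified stand-in handler (it records names in a list instead of constructing Directory objects):
# Stand-in app with the same route shape, driven by Flask's test client;
# it does not import the deduplicated package.
from flask import Flask, redirect, request
app = Flask(__name__)
added = []
@app.route('/dir/add', methods=['post'])
def diradd():
    dirname = request.form.get('directory', '')
    if dirname:
        added.append(dirname)    # the real handler constructs Directory(dirname)
    return redirect('/')
client = app.test_client()
resp = client.post('/dir/add', data={'directory': '/tmp/music'})
print(resp.status_code, added)   # 302 ['/tmp/music']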
00adc1c77d2bcc231a7f8995558ed86bb8071ae7
|
zun/websocket/websocketclient.py
|
zun/websocket/websocketclient.py
|
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
Remove unused LOG in websocket
|
Remove unused LOG in websocket
Change-Id: Ic45e5e4353dd816fd5416b880aa47df8542b2e02
|
Python
|
apache-2.0
|
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
|
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
Remove unused LOG in websocket
Change-Id: Ic45e5e4353dd816fd5416b880aa47df8542b2e02
|
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
<commit_before># Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
<commit_msg>Remove unused LOG in websocket
Change-Id: Ic45e5e4353dd816fd5416b880aa47df8542b2e02<commit_after>
|
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
Remove unused LOG in websocket
Change-Id: Ic45e5e4353dd816fd5416b880aa47df8542b2e02
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
<commit_before># Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import socket
import websocket
from zun.common import exception
LOG = logging.getLogger(__name__)
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
<commit_msg>Remove unused LOG in websocket
Change-Id: Ic45e5e4353dd816fd5416b880aa47df8542b2e02<commit_after># Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import websocket
from zun.common import exception
class WebSocketClient(object):
def __init__(self, host_url, escape='~',
close_wait=0.5):
self.escape = escape
self.close_wait = close_wait
self.host_url = host_url
self.cs = None
def connect(self):
url = self.host_url
try:
self.ws = websocket.create_connection(url,
skip_utf8_validation=True)
except socket.error as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketConnectionClosedException as e:
raise exception.ConnectionFailed(e)
except websocket.WebSocketBadStatusException as e:
raise exception.ConnectionFailed(e)
|
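The connect method's job is to translate the three transport-level failures websocket-client can raise into one domain exception. A condensed sketch of that pattern, with the three except clauses collapsed into a tuple and a hypothetical endpoint:
# Sketch of the same translation pattern using websocket-client.
import socket
import websocket
class ConnectionFailed(Exception):
    pass
def connect(url):
    try:
        return websocket.create_connection(url, skip_utf8_validation=True)
    except (socket.error,
            websocket.WebSocketConnectionClosedException,
            websocket.WebSocketBadStatusException) as e:
        raise ConnectionFailed(e)
# ws = connect('ws://127.0.0.1:2375/v1/containers/abc/attach')  # hypothetical URL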
7fc366e4ca45cbbdf2c1ab4a476af40ba88a9ae8
|
defender/urls.py
|
defender/urls.py
|
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[a-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[a-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
|
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[A-Za-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[A-Za-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
|
Fix for usernames with capital letters
|
Fix for usernames with capital letters
|
Python
|
apache-2.0
|
kencochrane/django-defender,kencochrane/django-defender
|
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[a-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[a-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
Fix for usernames with capital letters
|
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[A-Za-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[A-Za-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
|
<commit_before>from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[a-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[a-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
<commit_msg>Fix for usernames with capital letters<commit_after>
|
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[A-Za-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[A-Za-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
|
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[a-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[a-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
Fix for usernames with capital letters
from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[A-Za-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[A-Za-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
|
<commit_before>from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[a-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[a-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
<commit_msg>Fix for usernames with capital letters<commit_after>from django.conf.urls import patterns, url
from .views import block_view, unblock_ip_view, unblock_username_view
urlpatterns = patterns(
'',
url(r'^blocks/$', block_view,
name="defender_blocks_view"),
url(r'^blocks/ip/(?P<ip_address>[A-Za-z0-9-._]+)/unblock$', unblock_ip_view,
name="defender_unblock_ip_view"),
url(r'^blocks/username/(?P<username>[A-Za-z0-9-._@]+)/unblock$',
unblock_username_view,
name="defender_unblock_username_view"),
)
|
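The one-character-class change is easy to verify directly with re; the sample usernames below are invented:
# Quick check: the old character class rejects any capital letter.
import re
OLD = r'^[a-z0-9-._@]+$'
NEW = r'^[A-Za-z0-9-._@]+$'
for name in ('kencochrane', 'KenCochrane', 'Ken.Cochrane@example.com'):
    print(name, bool(re.match(OLD, name)), bool(re.match(NEW, name)))
# kencochrane True True
# KenCochrane False True
# Ken.Cochrane@example.com False True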
e2d51e23f530202b82ba13ae11c686deb1388435
|
prototype/BioID.py
|
prototype/BioID.py
|
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
|
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
|
Indent return in identify class.
|
Indent return in identify class.
|
Python
|
mit
|
LeeBergstrand/BioMagick,LeeBergstrand/BioMagick
|
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
Indent return in identify class.
|
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
|
<commit_before>#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog<commit_msg>Indent return in identify class.<commit_after>
|
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
|
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
Indent return in identify class.
#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
|
<commit_before>#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog<commit_msg>Indent return in identify class.<commit_after>#!/usr/bin/env python
#
# A class for auto identifying bioinformatics file formats.
# By Lee & Matt
import re
import json
import mmap
class BioID:
defs = None
def __init__(self, defpath):
with open(defpath, "r") as deffile:
conts = deffile.read()
self.defs = json.loads(conts)["formats"]
@classmethod
def identify(cls, files):
recog = {}
for file in files:
with open(file, "r") as infile:
buff = infile.read()
mem_map = mmap.mmap(infile.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ)
if len(buff) == 0:
recog[file] = "empty" # Empty files have no format :)
continue
for fdef in cls.defs:
matched = True
if "regexen" in fdef:
for regex in fdef["regexen"]:
if not re.findall(regex.replace("\\n", "\n"), buff, re.IGNORECASE):
matched = False
break
if "bytes" in fdef:
for bytes in fdef["bytes"]:
if mem_map.find(bytes.decode("string_escape")) == -1:
matched = False
break
if matched:
recog[file] = fdef["name"]
break
mem_map.close()
if file not in recog:
recog[file] = "unrecognized"
return recog
|
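As a side note on the magic-byte matching this record performs with mmap and "string_escape" (a Python 2 idiom), here is a minimal Python 3 sketch of the same idea; the SIGNATURES table and function name are illustrative, not part of the original definitions file.
SIGNATURES = {
    "gzip": b"\x1f\x8b",
    "png": b"\x89PNG\r\n\x1a\n",
}
def identify_bytes(path):
    # Read in binary mode so signatures can be compared as bytes directly,
    # avoiding the Py2-only "string_escape" decoding step.
    with open(path, "rb") as handle:
        buff = handle.read()
    if not buff:
        return "empty"
    for name, magic in SIGNATURES.items():
        if buff.startswith(magic):  # most magic numbers sit at offset 0
            return name
    return "unrecognized"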
be9c0169abd0419535fdc87b8f498cc65f5d7dd0
|
pycrawl/crawler.py
|
pycrawl/crawler.py
|
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
|
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content, url=url)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
|
Modify the Page parameter with url supplied
|
Modify the Page parameter with url supplied
|
Python
|
mit
|
slaveofcode/pycrawler,slaveofcode/pycrawler
|
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
Modify the Page parameter with url supplied
|
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content, url=url)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
|
<commit_before>'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
<commit_msg>Modify the Page parameter with url supplied<commit_after>
|
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content, url=url)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
|
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
Modify the Page parameter with url supplied
'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content, url=url)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
|
<commit_before>'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
<commit_msg>Modify the Page parameter with url supplied<commit_after>'''
from pycrawl import crawler
# Execute variants
page = crawler.grab('http://www.pasarpanda.com')
page = crawler.from_file('file.html')
page = crawler.from_text("<html></html>")
# methods
page.html('#my_container') # get html content by given css selector
page.text('#my_container') # get text content by given css selector
page.images() # all images links
# information provided
page.title # page title
page.encoding # page encoding
page.language # text language of page
page.metas # list dictionary of meta
page.content # text content
page.copyright # copyright
page.links # all links on the site
page.original_links # all original links (the domain links are same as the requested page)
page.resource_links # all js & css links
page.js_links # all javascript links
page.css_links # all css links
page.favicon # favicon url
'''
from .http import HttpRequest
from .page import Page
class Crawler:
@classmethod
def grab(cls, url):
content = HttpRequest.get(url)
return Page(content, url=url)
@classmethod
def from_file(cls, abs_path):
f = open(abs_path, 'r')
content = f.read()
f.close()
return Page(content)
@classmethod
def from_text(cls, text):
return Page(text)
|
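To see why this commit threads the request URL through to Page, here is a hedged sketch (Python 3; the PageSketch class is hypothetical and only illustrates the idea): with a base URL available, relative hrefs scraped from the document can be resolved to absolute links.
from urllib.parse import urljoin
class PageSketch:
    def __init__(self, content, url=None):
        self.content = content
        self.url = url
    def absolutize(self, href):
        # Without a base URL, the relative href is the best we can return.
        return urljoin(self.url, href) if self.url else href
page = PageSketch("<html></html>", url="http://www.pasarpanda.com")
print(page.absolutize("/category/books"))  # -> http://www.pasarpanda.com/category/books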
afac07ce173af3e7db4a6ba6dab4786903e217b7
|
ocradmin/ocr/tools/plugins/cuneiform_wrapper.py
|
ocradmin/ocr/tools/plugins/cuneiform_wrapper.py
|
"""
Wrapper for Cuneiform.
"""
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
|
"""
Wrapper for Cuneiform.
"""
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
|
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
"""
Wrapper for Cuneiform.
"""
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
|
"""
Wrapper for Cuneiform.
"""
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
<commit_before>"""
Wrapper for Cuneiform.
"""
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
<commit_msg>Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output<commit_after>
|
"""
Wrapper for Cuneiform.
"""
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
"""
Wrapper for Cuneiform.
"""
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output
"""
Wrapper for Cuneiform.
"""
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
<commit_before>"""
Wrapper for Cuneiform.
"""
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
<commit_msg>Allow Cuneiform to do full page conversions. Downsides: 1) it crashes on quite a lot of pages 2) there's no progress output<commit_after>"""
Wrapper for Cuneiform.
"""
import tempfile
import subprocess as sp
from ocradmin.ocr.tools import check_aborted, set_progress
from ocradmin.ocr.utils import HocrParser
from generic_wrapper import *
def main_class():
return CuneiformWrapper
class CuneiformWrapper(GenericWrapper):
"""
Override certain methods of the OcropusWrapper to
use Cuneiform for recognition of individual lines.
"""
name = "cuneiform"
capabilities = ("line", "page")
binary = get_binary("cuneiform")
def get_command(self, outfile, image):
"""
Cuneiform command line. Simplified for now.
"""
return [self.binary, "-o", outfile, image]
def convert(self, filepath, *args, **kwargs):
"""
Convert a full page.
"""
json = None
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.close()
args = [self.binary, "-f", "hocr", "-o", tmp.name, filepath]
self.logger.info(args)
proc = sp.Popen(args, stderr=sp.PIPE)
err = proc.stderr.read()
if proc.wait() != 0:
return "!!! %s CONVERSION ERROR %d: %s !!!" % (
os.path.basename(self.binary).upper(),
proc.returncode, err)
json = HocrParser().parsefile(tmp.name)
self.logger.info("%s" % json)
os.unlink(tmp.name)
set_progress(self.logger, kwargs["progress_func"], 100, 100)
return json
|
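One caveat about the Popen pattern in this record: reading proc.stderr.read() before proc.wait() can deadlock if the child fills its stderr pipe. A minimal alternative sketch using communicate(), which drains the pipe while waiting (binary path and arguments here are placeholders, not the project's real API):
import subprocess as sp
import tempfile
def run_ocr(binary, filepath):
    with tempfile.NamedTemporaryFile(suffix=".html", delete=False) as tmp:
        outfile = tmp.name
    proc = sp.Popen([binary, "-f", "hocr", "-o", outfile, filepath],
                    stderr=sp.PIPE)
    _, err = proc.communicate()  # waits for exit and drains stderr safely
    if proc.returncode != 0:
        raise RuntimeError("conversion failed (%d): %s" % (proc.returncode, err))
    return outfile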
b04f6ff01e105883280e06e5d7a2d767664cd318
|
config/settings_production.py
|
config/settings_production.py
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('../etc/secret_key') as f:
SECRET_KEY = f.read().strip()
|
Read secret from correct location
|
Read secret from correct location
|
Python
|
agpl-3.0
|
mmilaprat/policycompass-services,policycompass/policycompass-services,policycompass/policycompass-services,policycompass/policycompass-services,mmilaprat/policycompass-services,mmilaprat/policycompass-services
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
Read secret from correct location
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('../etc/secret_key') as f:
SECRET_KEY = f.read().strip()
|
<commit_before>"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
<commit_msg>Read secret from correct location<commit_after>
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('../etc/secret_key') as f:
SECRET_KEY = f.read().strip()
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
Read secret from correct location"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('../etc/secret_key') as f:
SECRET_KEY = f.read().strip()
|
<commit_before>"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
<commit_msg>Read secret from correct location<commit_after>"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('../etc/secret_key') as f:
SECRET_KEY = f.read().strip()
|
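The '../etc/secret_key' path in this commit is resolved against the process working directory, so the open() call depends on where Django is launched from. A sketch of a more launch-independent variant, anchoring the path to the settings module itself (the directory layout is an assumption kept from the record, not verified against the project):
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SECRET_KEY_FILE = os.path.join(BASE_DIR, '..', 'etc', 'secret_key')
with open(SECRET_KEY_FILE) as f:
    SECRET_KEY = f.read().strip()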
c69e4e6f3aab80ad3ac28e7a6b13f309a1b2d205
|
alembic/versions/151b2f642877_text_to_json.py
|
alembic/versions/151b2f642877_text_to_json.py
|
"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
Fix alembic revision after merge master
|
Fix alembic revision after merge master
|
Python
|
agpl-3.0
|
PyBossa/pybossa,Scifabric/pybossa,Scifabric/pybossa,PyBossa/pybossa,jean/pybossa,OpenNewsLabs/pybossa,geotagx/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,jean/pybossa
|
"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
Fix alembic revision after merge master
|
"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
<commit_before>"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
<commit_msg>Fix alembic revision after merge master<commit_after>
|
"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
Fix alembic revision after merge master"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
<commit_before>"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
<commit_msg>Fix alembic revision after merge master<commit_after>"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
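For comparison with the raw ALTER TABLE statements in this migration, here is a sketch of the same TEXT-to-JSON change through Alembic's higher-level API; assuming a reasonably recent Alembic/SQLAlchemy, alter_column with the postgresql_using keyword should emit equivalent SQL on PostgreSQL.
import sqlalchemy as sa
from alembic import op
def upgrade():
    # Alembic quotes reserved identifiers such as "user" automatically.
    for table in ('project', 'user', 'task', 'task_run'):
        op.alter_column(table, 'info',
                        type_=sa.JSON(),
                        postgresql_using='info::JSON')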
8c780f99dd82887a43c8ce661925f993fbc41003
|
readux/__init__.py
|
readux/__init__.py
|
from django.conf import settings
__version_info__ = (1, 2, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
|
Bump version to 1.3.0-dev after releasing 1.2
|
Bump version to 1.3.0-dev after releasing 1.2
|
Python
|
apache-2.0
|
emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
|
from django.conf import settings
__version_info__ = (1, 2, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
Bump version to 1.3.0-dev after releasing 1.2
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
|
<commit_before>from django.conf import settings
__version_info__ = (1, 2, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
<commit_msg>Bump version to 1.3.0-dev after releasing 1.2<commit_after>
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
|
from django.conf import settings
__version_info__ = (1, 2, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
Bump version to 1.3.0-dev after releasing 1.2
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
|
<commit_before>from django.conf import settings
__version_info__ = (1, 2, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
<commit_msg>Bump version to 1.3.0-dev after releasing 1.2<commit_after>__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
}
|
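A quick standalone check of the version-string logic used in this record: all but the last tuple component are dot-joined, and a non-None last component is appended with a dash. The helper name is ours, for illustration only.
def format_version(version_info):
    version = '.'.join(str(i) for i in version_info[:-1])
    if version_info[-1] is not None:
        version += '-%s' % (version_info[-1],)
    return version
assert format_version((1, 3, 0, 'dev')) == '1.3.0-dev'
assert format_version((1, 2, 0, None)) == '1.2.0'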
e06cc3da015505bc58eb705b5ee77fbbaae61a09
|
har/model/log.py
|
har/model/log.py
|
from datetime import datetime
from har import db
class Log(db.Model):
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
|
from datetime import datetime
from har import db
class Log(db.Model):
STATUS_PENDING = "pending"
STATUS_PENDING = "trained"
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
status = db.Column(db.String(40))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.status = Log.STATUS_PENDING
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
|
Add status column to Log model
|
Add status column to Log model
|
Python
|
mit
|
ilhamadun/har,ilhamadun/har
|
from datetime import datetime
from har import db
class Log(db.Model):
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
Add status column to Log model
|
from datetime import datetime
from har import db
class Log(db.Model):
STATUS_PENDING = "pending"
STATUS_PENDING = "trained"
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
status = db.Column(db.String(40))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.status = Log.STATUS_PENDING
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
|
<commit_before>from datetime import datetime
from har import db
class Log(db.Model):
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
<commit_msg>Add status column to Log model<commit_after>
|
from datetime import datetime
from har import db
class Log(db.Model):
STATUS_PENDING = "pending"
STATUS_PENDING = "trained"
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
status = db.Column(db.String(40))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.status = Log.STATUS_PENDING
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
|
from datetime import datetime
from har import db
class Log(db.Model):
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
Add status column to Log model
from datetime import datetime
from har import db
class Log(db.Model):
STATUS_PENDING = "pending"
STATUS_PENDING = "trained"
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
status = db.Column(db.String(40))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.status = Log.STATUS_PENDING
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
|
<commit_before>from datetime import datetime
from har import db
class Log(db.Model):
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %s>' % (self.log_type, self.subject_id)
<commit_msg>Add status column to Log model<commit_after>from datetime import datetime
from har import db
class Log(db.Model):
STATUS_PENDING = "pending"
STATUS_PENDING = "trained"
id = db.Column(db.Integer, primary_key=True)
subject_id = db.Column(db.Integer, db.ForeignKey('subject.device'))
log_type = db.Column(db.String(40))
activity = db.Column(db.String(40))
sensor_placement = db.Column(db.String(40))
number_of_sensor = db.Column(db.Integer)
total_sensor_axis = db.Column(db.Integer)
number_of_entry = db.Column(db.Integer)
path = db.Column(db.String(250))
status = db.Column(db.String(40))
timestamp = db.Column(db.DateTime)
def __init__(self, subject_id, log_type, activity, sensor_placement, number_of_sensor,
total_sensor_axis, number_of_entry, path):
self.subject_id = subject_id
self.log_type = log_type
self.activity = activity
self.sensor_placement = sensor_placement
self.number_of_sensor = number_of_sensor
self.total_sensor_axis = total_sensor_axis
self.number_of_entry = number_of_entry
self.path = path
self.status = Log.STATUS_PENDING
self.timestamp = datetime.now()
def __repr__(self):
        return '<%s log from subject %i>' % (self.log_type, self.subject_id)
|
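For context, a minimal usage sketch for the status column added in the record above. This is hypothetical and not part of the commit: the har.models import path and the Flask-SQLAlchemy session are assumptions.
from har import db
from har.models import Log  # assumed location of the model shown above

def mark_trained(log_id):
    """Move a pending Log to the trained state once its data has been used."""
    log = Log.query.get(log_id)  # Flask-SQLAlchemy query interface
    if log is not None and log.status == Log.STATUS_PENDING:
        log.status = Log.STATUS_TRAINED
        db.session.commit()  # persist the state transition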
a8080b402e408e5f2636039f24debd717f06b982
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='mailosaur',
version='3.0',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.1',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
Bump version number to 3.0.1
|
Bump version number to 3.0.1
|
Python
|
mit
|
mailosaur/mailosaur-python,mailosaurapp/mailosaur-python,mailosaur/mailosaur-python
|
from setuptools import setup
setup(name='mailosaur',
version='3.0',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
Bump version number to 3.0.1
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.1',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
<commit_before>from setuptools import setup
setup(name='mailosaur',
version='3.0',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
<commit_msg>Bump version number to 3.0.1<commit_after>
|
from setuptools import setup
setup(name='mailosaur',
version='3.0.1',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
from setuptools import setup
setup(name='mailosaur',
version='3.0',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
Bump version number to 3.0.1
from setuptools import setup
setup(name='mailosaur',
version='3.0.1',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
<commit_before>from setuptools import setup
setup(name='mailosaur',
version='3.0',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
<commit_msg>Bump version number to 3.0.1<commit_after>from setuptools import setup
setup(name='mailosaur',
version='3.0.1',
description='Python client library for Mailosaur',
url='https://mailosaur.com',
author='Clickity Ltd',
author_email='support@mailosaur.com',
keywords='email automation testing selenium robot framework',
license='MIT',
packages=['mailosaur'],
install_requires=[
'python-dateutil',
'requests',
'requests[security]'
],
zip_safe=False, requires=['requests'])
|
9da9ec6618de8c9a1276e44e81c32639d42efada
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='hy-py',
version='0.0.2',
packages=['hy'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
|
from distutils.core import setup
setup(
name='hy-py',
version='0.0.3',
packages=['hy', 'hy.adapters'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
|
Include the serializer adapters in the PyPI package
|
Include the serializer adapters in the PyPI package
|
Python
|
mit
|
kalasjocke/hyp
|
from distutils.core import setup
setup(
name='hy-py',
version='0.0.2',
packages=['hy'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
Include the serializer adapters in the PyPI package
|
from distutils.core import setup
setup(
name='hy-py',
version='0.0.3',
packages=['hy', 'hy.adapters'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
|
<commit_before>from distutils.core import setup
setup(
name='hy-py',
version='0.0.2',
packages=['hy'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
<commit_msg>Include the serializer adapters in the PyPI package<commit_after>
|
from distutils.core import setup
setup(
name='hy-py',
version='0.0.3',
packages=['hy', 'hy.adapters'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
|
from distutils.core import setup
setup(
name='hy-py',
version='0.0.2',
packages=['hy'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
Include the serializer adapters in the PyPI package
from distutils.core import setup
setup(
name='hy-py',
version='0.0.3',
packages=['hy', 'hy.adapters'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
|
<commit_before>from distutils.core import setup
setup(
name='hy-py',
version='0.0.2',
packages=['hy'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
<commit_msg>Include the serializer adapters in the PyPI package<commit_after>from distutils.core import setup
setup(
name='hy-py',
version='0.0.3',
packages=['hy', 'hy.adapters'],
license='MIT',
author='Joakim Ekberg',
author_email='jocke.ekberg@gmail.com',
url='https://github.com/kalasjocke/hy',
long_description=open('README.md').read(),
install_requires=open('requirements.txt').read().split(),
)
|
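An alternative sketch for the fix in the record above: setuptools' find_packages() discovers subpackages such as hy.adapters automatically (any directory containing an __init__.py), so the manual omission this commit repairs cannot happen. The exclude pattern here is illustrative only.
from setuptools import setup, find_packages

setup(
    name='hy-py',
    version='0.0.3',
    packages=find_packages(exclude=['tests', 'tests.*']),  # finds hy and hy.adapters
)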
d03c10590dea3c6e38fbee9b3cadebff1a5f003e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.10'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9'],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
|
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.1.0'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9',
],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
|
Fix version to conform to semver standard
|
Fix version to conform to semver standard
|
Python
|
mit
|
mattmurch/GuessThatSong,mattmurch/GuessThatSong
|
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.10'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9'],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
Fix version to conform to semver standard
|
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.1.0'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9',
],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
__version__ = '0.10'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9'],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
<commit_msg>Fix version to conform to semver standard<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.1.0'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9',
],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
|
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.10'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9'],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
Fix version to conform to semver standard
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.1.0'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9',
],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
__version__ = '0.10'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9'],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
<commit_msg>Fix version to conform to semver standard<commit_after>#!/usr/bin/env python
from setuptools import setup
__version__ = '0.1.0'
setup(name='GuessThatSong',
version=__version__,
install_requires=['Flask==0.12.1',
'SQLAlchemy>=1.1.9',
],
description='Song Trivia Game Web Application',
long_description=open('README.md').read(),
author='Matt Murch',
author_email='mattmurch@gmail.com',
url='https://github.com/mattmurch/GuessThatSong',
download_url='https://github.com/mattmurch/GuessThatSong/tarball/' + __version__,
scripts=['run.py'],
license='MIT',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Framework :: Flask',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Games/Entertainment',
],
keywords=('music trivia guessing game'),
packages=['app',
'db_repository',
],
)
|
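Why the change in the record above matters, shown with the packaging library (an assumption; it is not a dependency of this project): version components compare as integers, so '0.10' reads as release ten rather than one-point-zero, and the explicit PATCH digit in '0.1.0' is what semver requires.
from packaging.version import Version

assert Version('0.10') > Version('0.9')    # 10 > 9: '0.10' is release ten
assert Version('0.1.0') == Version('0.1')  # a trailing zero component is padding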
c1b97bbc6fc0603c0f2a809175edf88cd1e4a207
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy'])
|
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy', 'h5py', 'phonopy'])
|
Add requirement of h5py and phonopy
|
Add requirement of h5py and phonopy
|
Python
|
mit
|
yuzie007/ph_unfolder,yuzie007/upho
|
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy'])
Add requirement of h5py and phonopy
|
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy', 'h5py', 'phonopy'])
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy'])
<commit_msg>Add requirement of h5py and phonopy<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy', 'h5py', 'phonopy'])
|
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy'])
Add requirement of h5py and phonopy
#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy', 'h5py', 'phonopy'])
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy'])
<commit_msg>Add requirement of h5py and phonopy<commit_after>#!/usr/bin/env python
from distutils.core import setup
packages = [
'upho',
'upho.phonon',
'upho.harmonic',
'upho.analysis',
'upho.structure',
'upho.irreps',
'upho.qpoints',
'group',
]
scripts = [
'scripts/upho_weights',
'scripts/upho_sf',
'scripts/qpoints',
]
setup(name='upho',
version='0.5.1',
author="Yuji Ikeda",
author_email="ikeda.yuji.6m@kyoto-u.ac.jp",
packages=packages,
scripts=scripts,
install_requires=['numpy', 'h5py', 'phonopy'])
|
f890663daa329e3f22d0f619ed6acf9365308c7c
|
apps/ignite/views.py
|
apps/ignite/views.py
|
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = Submission.objects.filter(
phase__challenge=challenge
).exclude(
is_draft=True
).extra(
order_by="?"
)
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
|
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = (Submission.objects.visible()
.filter(phase__challenge=challenge)
.order_by("?"))
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
|
Update splash view to use visible() method.
|
Update splash view to use visible() method.
|
Python
|
bsd-3-clause
|
mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite
|
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = Submission.objects.filter(
phase__challenge=challenge
).exclude(
is_draft=True
).extra(
order_by="?"
)
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
Update splash view to use visible() method.
|
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = (Submission.objects.visible()
.filter(phase__challenge=challenge)
.order_by("?"))
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
|
<commit_before>from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = Submission.objects.filter(
phase__challenge=challenge
).exclude(
is_draft=True
).extra(
order_by="?"
)
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
<commit_msg>Update splash view to use visible() method.<commit_after>
|
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = (Submission.objects.visible()
.filter(phase__challenge=challenge)
.order_by("?"))
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
|
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = Submission.objects.filter(
phase__challenge=challenge
).exclude(
is_draft=True
).extra(
order_by="?"
)
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
Update splash view to use visible() method.
from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = (Submission.objects.visible()
.filter(phase__challenge=challenge)
.order_by("?"))
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
|
<commit_before>from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = Submission.objects.filter(
phase__challenge=challenge
).exclude(
is_draft=True
).extra(
order_by="?"
)
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
<commit_msg>Update splash view to use visible() method.<commit_after>from django.shortcuts import get_object_or_404
import jingo
from challenges.models import Submission, Category
from projects.models import Project
def splash(request, project, slug, template_name='challenges/show.html'):
"""Show an individual project challenge."""
project = get_object_or_404(Project, slug=project)
challenge = get_object_or_404(project.challenge_set, slug=slug)
entries = (Submission.objects.visible()
.filter(phase__challenge=challenge)
.order_by("?"))
return jingo.render(request, 'ignite/splash.html', {
'challenge': challenge,
'project': project,
'phases': list(enumerate(challenge.phases.all(), start=1)),
'entries': entries[:10],
'categories': Category.objects.get_active_categories(),
})
|
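A hypothetical sketch of the visible() manager method the updated view above now relies on; the real implementation lives somewhere in challenges.models and may differ in detail.
from django.db import models

class SubmissionManager(models.Manager):
    def visible(self):
        # Hide drafts, mirroring the filter the old view spelled out inline;
        # the result is a queryset, so .filter() and .order_by() still chain.
        return self.get_queryset().exclude(is_draft=True)

# attached on the model as: objects = SubmissionManager()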
d4b487ed1b276be230440e60ab3cdc81e73cff47
|
tests/unit/utils/test_utils.py
|
tests/unit/utils/test_utils.py
|
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
|
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
Add unit test to get opts from the environment
|
Add unit test to get opts from the environment
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
Add unit test to get opts from the environment
|
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
<commit_before># coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
<commit_msg>Add unit test to get opts from the environment<commit_after>
|
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
Add unit test to get opts from the environment
# coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
<commit_before># coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
<commit_msg>Add unit test to get opts from the environment<commit_after># coding=utf-8
'''
Test case for utils/__init__.py
'''
from __future__ import unicode_literals, print_function, absolute_import
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
try:
import pytest
except ImportError:
pytest = None
import salt.utils
@skipIf(pytest is None, 'PyTest is missing')
class UtilsTestCase(TestCase):
'''
Test case for utils/__init__.py
'''
def test_get_module_environment_empty(self):
'''
Test for salt.utils.get_module_environment
Test if empty globals returns to an empty environment
with the correct type.
:return:
'''
out = salt.utils.get_module_environment({})
assert out == {}
assert isinstance(out, dict)
def test_get_module_environment_opts(self):
'''
Test for salt.utils.get_module_environment
:return:
'''
expectation = {'message': 'Melting hard drives'}
_globals = {'__opts__': {'system-environment': {'salt.in.system': expectation}},
'__file__': '/daemons/loose/in/system.py'}
assert salt.utils.get_module_environment(_globals) == expectation
|
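A minimal sketch of an implementation that would satisfy both tests in the record above; the real salt.utils.get_module_environment is more involved. The matching rule assumed here: pick the 'system-environment' entry whose dotted key ends with the calling module's name.
import os

def get_module_environment(globals_=None):
    """Return the per-module environment configured in __opts__, if any."""
    out = {}
    if not globals_:
        return out
    module = os.path.basename(globals_.get('__file__', '')).rsplit('.', 1)[0]
    env_map = globals_.get('__opts__', {}).get('system-environment', {})
    for key, value in env_map.items():
        if key.rsplit('.', 1)[-1] == module:  # 'salt.in.system' -> 'system'
            out.update(value)
    return out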
22c6ab4806745ed2b9291dcbf4c0fbe8a72aaeed
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring'
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
long_description="Library that allows you get current power source type (AC, Battery or UPS), warning level (none, <22%, <10min) and remaining minutes. You can also observe changes of power source and remaining time.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
license="MIT License",
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
|
Add license, long description and fix classifiers.
|
Add license, long description and fix classifiers.
|
Python
|
mit
|
Kentzo/Power
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring'
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
Add license, long description and fix classifiers.
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
long_description="Library that allows you get current power source type (AC, Battery or UPS), warning level (none, <22%, <10min) and remaining minutes. You can also observe changes of power source and remaining time.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
license="MIT License",
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring'
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
<commit_msg>Add license, long description and fix classifiers.<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
long_description="Library that allows you get current power source type (AC, Battery or UPS), warning level (none, <22%, <10min) and remaining minutes. You can also observe changes of power source and remaining time.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
license="MIT License",
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
|
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring'
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
Add license, long description and fix classifiers.
#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
long_description="Library that allows you get current power source type (AC, Battery or UPS), warning level (none, <22%, <10min) and remaining minutes. You can also observe changes of power source and remaining time.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
license="MIT License",
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring'
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
<commit_msg>Add license, long description and fix classifiers.<commit_after>#!/usr/bin/env python
# coding=utf-8
__author__ = 'kulakov.ilya@gmail.com'
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc >= 2.5')
setup(
name="power",
version="1.2",
description="Cross-platform system power status information.",
long_description="Library that allows you get current power source type (AC, Battery or UPS), warning level (none, <22%, <10min) and remaining minutes. You can also observe changes of power source and remaining time.",
author="Ilya Kulakov",
author_email="kulakov.ilya@gmail.com",
url="https://github.com/Kentzo/Power",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
packages=['power'],
license="MIT License",
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Power (UPS)',
],
install_requires=REQUIREMENTS
)
|
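The classifier part of the fix above is subtler than it looks: Python concatenates adjacent string literals, so the missing comma in the old file silently merged two classifiers into one invalid entry.
classifiers = [
    'Topic :: System :: Monitoring'    # no trailing comma here...
    'Topic :: System :: Power (UPS)',  # ...so the two literals fuse
]
print(len(classifiers))  # 1, not 2
print(classifiers[0])    # 'Topic :: System :: MonitoringTopic :: System :: Power (UPS)'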
665d980f62840fc0c8e531cab21faf8151d016d7
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon",
"psycopg2"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
|
Add psycopg2 to list of dependencies
|
Add psycopg2 to list of dependencies
Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0
Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]]
Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b]
Former-commit-id: a778516564b1489efffbb822760e23ead82c4467
|
Python
|
mit
|
PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
Add psycopg2 to list of dependencies
Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0
Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]]
Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b]
Former-commit-id: a778516564b1489efffbb822760e23ead82c4467
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon",
"psycopg2"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
|
<commit_before>#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
<commit_msg>Add psycopg2 to list of dependencies
Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0
Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]]
Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b]
Former-commit-id: a778516564b1489efffbb822760e23ead82c4467<commit_after>
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon",
"psycopg2"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
Add psycopg2 to list of dependencies
Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0
Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]]
Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b]
Former-commit-id: a778516564b1489efffbb822760e23ead82c4467
#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon",
"psycopg2"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
|
<commit_before>#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
<commit_msg>Add psycopg2 to list of dependencies
Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0
Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]]
Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b]
Former-commit-id: a778516564b1489efffbb822760e23ead82c4467<commit_after>#!/usr/bin/env python2
from setuptools import setup, find_packages
setup(name='pprof',
version='0.9.6',
packages=find_packages(),
install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2",
"regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6",
"virtualenv==13.1.0", "sphinxcontrib-napoleon",
"psycopg2"],
author="Andreas Simbuerger",
author_email="simbuerg@fim.uni-passau.de",
description="This is the experiment driver for the pprof study",
license="MIT",
entry_points={
'console_scripts': ['pprof=pprof.driver:main']
})
|
44b78b4eee2b62cb4308b266008d79c218a032ca
|
setup.py
|
setup.py
|
# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-probability',
'transitions',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
|
# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gcsfs',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-datasets',
'tensorflow-probability',
'transitions',
'zarr',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
|
Add gcsfs, tensorflow-datasets, zarr as dependencies.
|
Add gcsfs, tensorflow-datasets, zarr as dependencies.
PiperOrigin-RevId: 414517642
|
Python
|
apache-2.0
|
google/balloon-learning-environment
|
# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-probability',
'transitions',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
Add gcsfs, tensorflow-datasets, zarr as dependencies.
PiperOrigin-RevId: 414517642
|
# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gcsfs',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-datasets',
'tensorflow-probability',
'transitions',
'zarr',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
|
<commit_before># coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-probability',
'transitions',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
<commit_msg>Add gcsfs, tensorflow-datasets, zarr as dependencies.
PiperOrigin-RevId: 414517642<commit_after>
|
# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gcsfs',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-datasets',
'tensorflow-probability',
'transitions',
'zarr',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
|
# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-probability',
'transitions',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
Add gcsfs, tensorflow-datasets, zarr as dependencies.
PiperOrigin-RevId: 414517642# coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gcsfs',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-datasets',
'tensorflow-probability',
'transitions',
'zarr',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
|
<commit_before># coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-probability',
'transitions',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
<commit_msg>Add gcsfs, tensorflow-datasets, zarr as dependencies.
PiperOrigin-RevId: 414517642<commit_after># coding=utf-8
# Copyright 2021 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for installing the BLE."""
import setuptools
setuptools.setup(
name='balloon_learning_environment',
version='0.0.1',
packages=setuptools.find_packages(),
install_requires=[
'absl-py',
'dopamine-rl',
'flax',
'gcsfs',
'gin-config',
'gym',
'opensimplex',
's2sphere',
'scikit-learn',
'tensorflow',
'tensorflow-datasets',
'tensorflow-probability',
'transitions',
'zarr',
],
package_data={
'': ['*.msgpack', '*.pb'],
},
python_requires='>=3.7',
)
|
2b9e66c124b0d62f177a3875220714f4324ec0c0
|
setup.py
|
setup.py
|
"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.3',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
|
"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.4',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
|
Set up release to mesh-tensorflow 0.0.4.
|
Set up release to mesh-tensorflow 0.0.4.
PiperOrigin-RevId: 221204704
|
Python
|
apache-2.0
|
tensorflow/mesh,tensorflow/mesh
|
"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.3',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
Set up release to mesh-tensorflow 0.0.4.
PiperOrigin-RevId: 221204704
|
"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.4',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
|
<commit_before>"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.3',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
<commit_msg>Set up release to mesh-tensorflow 0.0.4.
PiperOrigin-RevId: 221204704<commit_after>
|
"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.4',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
|
"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.3',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
Set up release to mesh-tensorflow 0.0.4.
PiperOrigin-RevId: 221204704"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.4',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
|
<commit_before>"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.3',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
<commit_msg>Set up release to mesh-tensorflow 0.0.4.
PiperOrigin-RevId: 221204704<commit_after>"""Install Mesh TensorFlow."""
from setuptools import find_packages
from setuptools import setup
setup(
name='mesh-tensorflow',
version='0.0.4',
description='Mesh TensorFlow',
author='Google Inc.',
author_email='no-reply@google.com',
url='http://github.com/tensorflow/mesh',
license='Apache 2.0',
packages=find_packages(),
package_data={},
scripts=[],
install_requires=[
'future',
'six',
],
extras_require={
'tensorflow': ['tensorflow>=1.9.0'],
'tensorflow_gpu': ['tensorflow-gpu>=1.9.0'],
'tests': [
'absl-py',
'pytest',
'tensor2tensor>=1.9.0', # TODO(trandustin): rm dependence
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='tensorflow machine learning',
)
|
95ad476984d0bb095adf22f9486ab1f710370a53
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'scikit-learn',
'nltk',
'numpy',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'numpy',
'scipy',
'scikit-learn',
'nltk',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
|
Add scipy in requirements list
|
Add scipy in requirements list
|
Python
|
mit
|
studiawan/pygraphc
|
from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'scikit-learn',
'nltk',
'numpy',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
Add scipy in requirements list
|
from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'numpy',
'scipy',
'scikit-learn',
'nltk',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'scikit-learn',
'nltk',
'numpy',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
<commit_msg>Add scipy in requirements list<commit_after>
|
from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'numpy',
'scipy',
'scikit-learn',
'nltk',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'scikit-learn',
'nltk',
'numpy',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
Add scipy in requirements listfrom setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'numpy',
'scipy',
'scikit-learn',
'nltk',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'scikit-learn',
'nltk',
'numpy',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
<commit_msg>Add scipy in requirements list<commit_after>from setuptools import setup
setup(name='pygraphc',
version='0.0.1',
description='Event log clustering in Python',
long_description='This package contains event log clustering method including non-graph and '
'graph-based approaches.',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Security',
],
keywords='log clustering graph anomaly',
url='http://github.com/studiawan/pygraphc/',
author='Hudan Studiawan',
author_email='studiawan@gmail.com',
license='MIT',
packages=['pygraphc'],
scripts=['scripts/pygraphc'],
install_requires=[
'networkx',
'numpy',
'scipy',
'scikit-learn',
'nltk',
'Sphinx',
'numpydoc',
'TextBlob',
],
include_package_data=True,
zip_safe=False)
|
0e33f4ab0ad4cabc7b317fc1c17b187337ab958b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
'requests<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
|
from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
          'requests>=2.20.0,<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
|
Update requests to address security concern
|
Update requests to address security concern
|
Python
|
apache-2.0
|
CitrineInformatics/python-citrination-client
|
from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
'requests<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
Update requests to address security concern
|
from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
          'requests>=2.20.0,<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
|
<commit_before>from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
'requests<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
<commit_msg>Update requests to address security concern<commit_after>
|
from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
          'requests>=2.20.0,<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
|
from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
'requests<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
Update requests to address security concernfrom setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
          'requests>=2.20.0,<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
|
<commit_before>from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
'requests<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
<commit_msg>Update requests to address security concern<commit_after>from setuptools import setup, find_packages
setup(name='citrination-client',
version='4.6.0',
url='http://github.com/CitrineInformatics/python-citrination-client',
description='Python client for accessing the Citrination api',
packages=find_packages(exclude=('docs')),
install_requires=[
          'requests>=2.20.0,<3',
'pypif',
'six<2',
'pyyaml'
],
extras_require={
"dev": [
'sphinx_rtd_theme',
'sphinx',
],
"test": [
'requests_mock',
'pytest',
]
})
|
8155233ee78208b0c6707b4c075f1a334b781264
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Add pysqlite to the dependencies
|
Add pysqlite to the dependencies
|
Python
|
apache-2.0
|
patricklaw/clortho
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Add pysqlite to the dependencies
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Add pysqlite to the dependencies<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Add pysqlite to the dependencies#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Add pysqlite to the dependencies<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright 2012 ShopWiki
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup
VERSION = '0.0.0'
DESCRIPTION = 'Web Authentication with SQLAlchemy'
setup(
name='Clortho',
version=VERSION,
description=DESCRIPTION,
author='Patrick Lawson',
license='Apache 2',
author_email='plawson@shopwiki.com',
url='http://github.com/shopwiki/clortho',
packages=['clortho'],
install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
d2699f79e544fdfee1745da00ad16a2950d6ee10
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from setuptools import setup
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
|
#! /usr/bin/env python
import sys
from setuptools import setup
if 'upload' in sys.argv:
if '--sign' not in sys.argv and sys.argv[1:] != ['upload', '--help']:
raise SystemExit('Refusing to upload unsigned packages.')
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
|
Add a "signature requirement" to the sdist upload command.
|
Add a "signature requirement" to the sdist upload command.
|
Python
|
mit
|
nejucomo/preconditions
|
#! /usr/bin/env python
from setuptools import setup
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
Add a "signature requirement" to the sdist upload command.
|
#! /usr/bin/env python
import sys
from setuptools import setup
if 'upload' in sys.argv:
if '--sign' not in sys.argv and sys.argv[1:] != ['upload', '--help']:
raise SystemExit('Refusing to upload unsigned packages.')
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
<commit_msg>Add a "signature requirement" to the sdist upload command.<commit_after>
|
#! /usr/bin/env python
import sys
from setuptools import setup
if 'upload' in sys.argv:
if '--sign' not in sys.argv and sys.argv[1:] != ['upload', '--help']:
raise SystemExit('Refusing to upload unsigned packages.')
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
|
#! /usr/bin/env python
from setuptools import setup
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
Add a "signature requirement" to the sdist upload command.#! /usr/bin/env python
import sys
from setuptools import setup
if 'upload' in sys.argv:
if '--sign' not in sys.argv and sys.argv[1:] != ['upload', '--help']:
raise SystemExit('Refusing to upload unsigned packages.')
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
<commit_msg>Add a "signature requirement" to the sdist upload command.<commit_after>#! /usr/bin/env python
import sys
from setuptools import setup
if 'upload' in sys.argv:
if '--sign' not in sys.argv and sys.argv[1:] != ['upload', '--help']:
raise SystemExit('Refusing to upload unsigned packages.')
PACKAGENAME = 'preconditions'
setup(
name=PACKAGENAME,
description='Flexible, concise preconditions.',
url='https://github.com/nejucomo/{0}'.format(PACKAGENAME),
license='MIT',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
py_modules=[PACKAGENAME],
test_suite='tests',
)
|
17fed2073cd89ca2d3ff8d182b360542e4cf6e1a
|
setup.py
|
setup.py
|
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read() + open("HISTORY.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
|
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
|
Remove changelog from release description
|
Remove changelog from release description
|
Python
|
mit
|
alisaifee/flask-limiter,alisaifee/flask-limiter
|
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read() + open("HISTORY.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
Remove changelog from release description
|
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
|
<commit_before>"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read() + open("HISTORY.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
<commit_msg>Remove changelog from release description<commit_after>
|
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
|
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read() + open("HISTORY.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
Remove changelog from release description
"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
|
<commit_before>"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read() + open("HISTORY.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
<commit_msg>Remove changelog from release description<commit_after>"""
setup.py for Flask-Limiter
"""
__author__ = "Ali-Akber Saifee"
__email__ = "ali@indydevs.org"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
import versioneer
this_dir = os.path.abspath(os.path.dirname(__file__))
REQUIREMENTS = filter(
None, open(os.path.join(this_dir, "requirements", "main.txt")).read().splitlines()
)
setup(
name="Flask-Limiter",
author=__author__,
author_email=__email__,
license="MIT",
url="https://flask-limiter.readthedocs.org",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=list(REQUIREMENTS),
classifiers=[k for k in open("CLASSIFIERS").read().split("\n") if k],
description="Rate limiting for flask applications",
long_description=open("README.rst").read(),
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.7",
)
|
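The change above trims HISTORY.rst out of long_description so the PyPI page carries only the README; note also that filter() returns an iterator on Python 3, which is why the result is wrapped in list() before being handed to install_requires. A hedged sketch of the README-only pattern, with an explicit encoding added (the file name is the one used in this record):

import os

def read(path):
    # Explicit encoding avoids locale-dependent decoding during builds.
    with open(path, encoding='utf-8') as fh:
        return fh.read()

long_description = read('README.rst')  # changelog deliberately omitted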
d541611100740eceedae075e199bdf527761d01d
|
setup.py
|
setup.py
|
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0alpha.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
|
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
|
Tag version v1.0.0 -- first extracted version
|
Tag version v1.0.0 -- first extracted version
|
Python
|
apache-2.0
|
armstrong/armstrong.templates.tutorial
|
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0alpha.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
Tag version v1.0.0 -- first extracted version
|
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
|
<commit_before>from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0alpha.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
<commit_msg>Tag version v1.0.0 -- first extracted version<commit_after>
|
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
|
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0alpha.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
Tag version v1.0.0 -- first extracted version
from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
|
<commit_before>from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0alpha.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
<commit_msg>Tag version v1.0.0 -- first extracted version<commit_after>from distutils.core import setup
import os
package_data = []
BASE_DIR = os.path.dirname(__file__)
walk_generator = os.walk(os.path.join(BASE_DIR, "project_template"))
paths_and_files = [(paths, files) for paths, dirs, files in walk_generator]
for path, files in paths_and_files:
prefix = path[path.find("project_template") + len("project_template/"):]
if files:
package_data.append(os.path.join(prefix, "*.*"))
setup(
name="armstrong.templates.tutorial",
version="1.0.0",
description="The tutorial project for Armstrong",
long_description=open("README.rst").read(),
author='Texas Tribune & Bay Citizen',
author_email='dev@armstrongcms.org',
packages=[
"armstrong.templates.tutorial",
],
package_dir={
"armstrong.templates.tutorial": "project_template",
},
package_data={
"armstrong.templates.tutorial": package_data,
},
namespace_packages=[
"armstrong",
"armstrong.templates",
"armstrong.templates.tutorial",
],
entry_points={
"armstrong.templates": [
"tutorial = armstrong.templates.tutorial",
],
},
)
|
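The walk in this record emits one "<subdir>/*.*" glob per directory that actually contains files; note that "*.*" requires a dot, so extensionless files such as a bare Makefile would be skipped. A relpath-based sketch of the same idiom (not the project's exact code):

import os

def collect_package_data(root):
    patterns = []
    for path, _dirs, files in os.walk(root):
        if files:
            rel = os.path.relpath(path, root)  # '.' for the root directory itself
            patterns.append(os.path.join(rel, '*.*'))
    return patterns

# e.g. package_data={'armstrong.templates.tutorial': collect_package_data('project_template')}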
4ac37e35396e2393a9bbe2e954674537747e384b
|
setup.py
|
setup.py
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % (120*int(time.time()/120))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=os.getenv(
'PAGEKITE_VERSION',
APPVER.replace('github', 'dev%d' % (120*int(time.time()/120)))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
|
Make it possible to manually override version numbers
|
Make it possible to manually override version numbers
|
Python
|
agpl-3.0
|
pagekite/PyPagekite,pagekite/PyPagekite,pagekite/PyPagekite
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % (120*int(time.time()/120))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
Make it possible to manually override version numbers
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=os.getenv(
'PAGEKITE_VERSION',
APPVER.replace('github', 'dev%d' % (120*int(time.time()/120)))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
|
<commit_before>#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % (120*int(time.time()/120))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
<commit_msg>Make it possible to manually override version numbers<commit_after>
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=os.getenv(
'PAGEKITE_VERSION',
APPVER.replace('github', 'dev%d' % (120*int(time.time()/120)))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % (120*int(time.time()/120))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
Make it possible to manually override version numbers
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=os.getenv(
'PAGEKITE_VERSION',
APPVER.replace('github', 'dev%d' % (120*int(time.time()/120)))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
|
<commit_before>#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % (120*int(time.time()/120))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
<commit_msg>Make it possible to manually override version numbers<commit_after>#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=os.getenv(
'PAGEKITE_VERSION',
APPVER.replace('github', 'dev%d' % (120*int(time.time()/120)))),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat', 'scripts/vipagekite'],
install_requires=['SocksipyChain >= 2.0.15']
)
|
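The commit wraps the computed version in os.getenv so an environment variable can pin release builds, while dev builds fall back to a timestamp quantized into 120-second buckets (rebuilds within two minutes share a version number). The pattern in isolation, with an assumed APPVER shape:

import os
import time

APPVER = '1.5.0github'  # assumed shape of pagekite.common.APPVER

def resolve_version():
    # Quantize to 120-second buckets so near-simultaneous builds agree.
    dev = APPVER.replace('github', 'dev%d' % (120 * int(time.time() / 120)))
    return os.getenv('PAGEKITE_VERSION', dev)  # explicit env var wins when set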
dcbc65d31c63dc731480867419fb45c9a631dd15
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='livescrape',
version='0.9.4',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
|
from setuptools import setup
setup(
name='livescrape',
version='0.9.5',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
|
Prepare for 0.9.5 on pypi
|
Prepare for 0.9.5 on pypi
|
Python
|
mit
|
ondergetekende/livescrape
|
from setuptools import setup
setup(
name='livescrape',
version='0.9.4',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
Prepare for 0.9.5 on pypi
|
from setuptools import setup
setup(
name='livescrape',
version='0.9.5',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>from setuptools import setup
setup(
name='livescrape',
version='0.9.4',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Prepare for 0.9.5 on pypi<commit_after>
|
from setuptools import setup
setup(
name='livescrape',
version='0.9.5',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
|
from setuptools import setup
setup(
name='livescrape',
version='0.9.4',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
Prepare for 0.9.5 on pypi
from setuptools import setup
setup(
name='livescrape',
version='0.9.5',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>from setuptools import setup
setup(
name='livescrape',
version='0.9.4',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Prepare for 0.9.5 on pypi<commit_after>from setuptools import setup
setup(
name='livescrape',
version='0.9.5',
url='https://github.com/ondergetekende/python-livescrape',
description='A toolkit to build pythonic web scraper libraries',
author='Koert van der Veer',
author_email='koert@ondergetekende.nl',
py_modules=["livescrape"],
install_requires=["lxml", "requests", "cssselect", "six"],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2.7',
],
)
|
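This release is a pure hand-edited version bump. One common way to avoid touching setup.py for every release is to keep __version__ in the module and read it with a regex (importing the module at build time can fail if its dependencies are not yet installed). A hypothetical sketch, not what livescrape actually does:

import re

def module_version(path='livescrape.py'):
    # Scrape __version__ without importing the module.
    with open(path) as fh:
        match = re.search(r"__version__\s*=\s*['\"]([^'\"]+)['\"]", fh.read())
    if match is None:
        raise RuntimeError('no __version__ found in %s' % path)
    return match.group(1)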
49e26262107abb58d7ca212270be36d52e8668e5
|
setup.py
|
setup.py
|
from setuptools import setup
import multiprocessing
version = '0.2.0'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
)
|
from setuptools import setup
import multiprocessing
version = '0.2.1'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
categories=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
categories=categories,
)
|
Change version with better package documentation
|
Change version with better package documentation
|
Python
|
mit
|
cloudwalkio/ddbscan,allanino/ddbscan
|
from setuptools import setup
import multiprocessing
version = '0.2.0'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
)
Change version with better package documentation
|
from setuptools import setup
import multiprocessing
version = '0.2.1'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
categories=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
categories=categories,
)
|
<commit_before>from setuptools import setup
import multiprocessing
version = '0.2.0'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
)
<commit_msg>Change version with better package documentation<commit_after>
|
from setuptools import setup
import multiprocessing
version = '0.2.1'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
categories=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
categories=categories,
)
|
from setuptools import setup
import multiprocessing
version = '0.2.0'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
)
Change version with better package documentation
from setuptools import setup
import multiprocessing
version = '0.2.1'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
categories=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
categories=categories,
)
|
<commit_before>from setuptools import setup
import multiprocessing
version = '0.2.0'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
)
<commit_msg>Change version with better package documentation<commit_after>from setuptools import setup
import multiprocessing
version = '0.2.1'
install_requires = ["scipy >= 0.13.3",
"numpy >=1.8.1"]
tests_require = ["nose"]
categories=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
]
setup(
name='ddbscan',
version=version,
author='Allan Inocencio de Souza Costa',
author_email='allan@cloudwalk.io',
url='https://github.com/cloudwalkio/ddbscan',
packages=['ddbscan'],
license='MIT',
description='Discrete DBSCAN algorithm optimized for discrete and bounded data.',
long_description=open('README.rst').read(),
test_suite = 'nose.collector',
tests_require=tests_require,
install_requires=install_requires,
categories=categories,
)
|
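One caveat about the new_contents above: setup() has no `categories` keyword, so distutils only prints an "Unknown distribution option" warning and the Trove data never reaches PyPI; the standard keyword is `classifiers`. A corrected sketch of that part of the call:

from setuptools import setup

setup(
    name='ddbscan',
    version='0.2.1',
    classifiers=[  # `classifiers`, not `categories`, is what PyPI reads
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
    ],
)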
ec794b23c25f27f2d0bd427e1e546f707f0b65ec
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
|
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
|
Add install-time error message for Python 3.1 and earlier.
|
Add install-time error message for Python 3.1 and earlier.
|
Python
|
mit
|
renstrom/python-jump-consistent-hash,renstrom/python-jump-consistent-hash,renstrom/python-jump-consistent-hash
|
from setuptools import setup
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
Add install-time error message for Python 3.1 and earlier.
|
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
|
<commit_before>from setuptools import setup
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
<commit_msg>Add install-time error message for Python 3.1 and earlier.<commit_after>
|
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
|
from setuptools import setup
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
Add install-time error message for Python 3.1 and earlier.
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
|
<commit_before>from setuptools import setup
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
<commit_msg>Add install-time error message for Python 3.1 and earlier.<commit_after>from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.0',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
])
|
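The guard above runs at install time, prints to stderr, and exits non-zero, so pip surfaces a readable message instead of a SyntaxError from deep inside the package on an old interpreter. The same pattern, generalized:

from __future__ import print_function
import sys

MIN_PY = (3, 2)

if sys.version_info < MIN_PY:
    # Fail fast with a clear message before any py3-only code is parsed.
    print('ERROR: this package requires Python %d.%d or newer.' % MIN_PY,
          file=sys.stderr)
    sys.exit(1)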
1791407e845b7f552f9c4f87fa22116ea48f1892
|
setup.py
|
setup.py
|
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '1.0.4' #'.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
Fix a hard-coded version number
|
Fix a hard-coded version number
|
Python
|
isc
|
zapier/django-rest-hooks,meric/django-rest-hooks,gvangool/django-rest-hooks,pombredanne/django-rest-hooks,PressLabs/django-rest-hooks-ng
|
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '1.0.4' #'.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
Fix a hard-coded version number
|
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
<commit_before>from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '1.0.4' #'.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
<commit_msg>Fix a hard-coded version number<commit_after>
|
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '1.0.4' #'.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
Fix a hard-coded version number
from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
<commit_before>from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '1.0.4' #'.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
<commit_msg>Fix a hard-coded version number<commit_after>from distutils.core import setup # setuptools breaks
# Dynamically calculate the version based on knowledge.VERSION
version_tuple = __import__('rest_hooks').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name = 'django-rest-hooks',
description = 'A powerful mechanism for sending real time API notifications via a new subscription model.',
version = version,
author = 'Bryan Helmig',
author_email = 'bryan@zapier.com',
url = 'http://github.com/zapier/django-rest-hooks',
install_requires=['Django>=1.4','requests'],
packages=['rest_hooks'],
package_data={
'rest_hooks': [
'migrations/*.py'
]
},
classifiers = ['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
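The record above shows the usual cure for a hard-coded release string: derive it from a VERSION tuple kept inside the package, so setup.py and the package can never disagree. A minimal sketch of the same pattern, using a hypothetical package named mypkg (the name and tuple are illustrative, not part of the record):

# mypkg/__init__.py (assumed layout)
VERSION = (1, 0, 5)

# setup.py
from distutils.core import setup

# Build the dotted version string from the single source of truth.
version_tuple = __import__('mypkg').VERSION
version = '.'.join(str(v) for v in version_tuple)

setup(name='mypkg', version=version)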
1f1660f7a3c2ef4b87c306e71ba033a087add0bb
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.2.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
Adjust version in download url
|
Adjust version in download url
|
Python
|
mit
|
VonStruddle/PyHunter
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
Adjust version in download url
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.2.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
<commit_before>from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
<commit_msg>Adjust version in download url<commit_after>
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.2.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
Adjust version in download url
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.2.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
<commit_before>from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
<commit_msg>Adjust version in download url<commit_after>from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='quentin.durantay@gmail.com',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.2.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
)
|
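The pyhunter fix above is the classic follow-up chore of a release: download_url still pointed at the 0.1 tag after the version moved to 0.2. One way to rule out that drift, sketched here on the same project, is to build the archive URL from the one version variable that setup() already receives:

from setuptools import setup

version = '0.2'  # single place to bump on each release

setup(
    name='pyhunter',
    version=version,
    # Deriving the tag archive URL from the version means the two
    # values can never fall out of sync again.
    download_url='https://github.com/VonStruddle/PyHunter/archive/'
                 '%s.tar.gz' % version,
)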
8e67056ccd09a3b31edfb13bc38091606752c84d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
|
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg',
'pcbmode_config.json'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
|
Include pcbmode_config.json file in package
|
Include pcbmode_config.json file in package
|
Python
|
mit
|
ddm/pcbmode,boldport/pcbmode
|
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
Include pcbmode_config.json file in package
|
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg',
'pcbmode_config.json'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
<commit_msg>Include pcbmode_config.json file in package<commit_after>
|
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg',
'pcbmode_config.json'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
|
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
Include pcbmode_config.json file in package
from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg',
'pcbmode_config.json'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
<commit_msg>Include pcbmode_config.json file in package<commit_after>from setuptools import setup, find_packages
setup(
name = "pcbmode",
packages = find_packages(),
use_scm_version=True,
setup_requires=['setuptools_scm'],
install_requires = ['lxml', 'pyparsing'],
package_data = {
'pcbmode': ['stackups/*.json',
'styles/*/*.json',
'fonts/*.svg',
'pcbmode_config.json'],
},
# metadata for upload to PyPI
author = "Saar Drimer",
author_email = "saardrimer@gmail.com",
description = "A printed circuit board design tool with a twist",
license = "MIT",
keywords = "pcb svg eda pcbmode",
url = "https://github.com/boldport/pcbmode",
entry_points={
'console_scripts': ['pcbmode = pcbmode.pcbmode:main']
},
zip_safe = True
)
|
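The pcbmode change works because package_data globs are matched relative to the package directory and each file must be covered by some pattern: the existing stackups/styles/fonts globs say nothing about a file sitting at the package root, so pcbmode_config.json needs its own entry. A reduced sketch of just the relevant fragment (paths taken from the diff above):

from setuptools import setup, find_packages

setup(
    name='pcbmode',
    packages=find_packages(),
    package_data={
        # Patterns are relative to the 'pcbmode' package; the config
        # file lives at the package root, so it gets an explicit entry
        # next to the directory globs.
        'pcbmode': ['stackups/*.json',
                    'styles/*/*.json',
                    'fonts/*.svg',
                    'pcbmode_config.json'],
    },
)

An alternative with the same effect would be include_package_data=True plus a MANIFEST.in line, but the explicit glob keeps everything in one file.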
974112e0ebd9397e3b3d3e49034465713b8df996
|
tools/lxlcrawler/lxlcrawler.py
|
tools/lxlcrawler/lxlcrawler.py
|
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
|
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('next') or data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
|
Handle both forms of next links
|
Handle both forms of next links
|
Python
|
cc0-1.0
|
Kungbib/datalab,Kungbib/datalab
|
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
Handle both forms of next links
|
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('next') or data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
|
<commit_before>#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
<commit_msg>Handle both forms of next links<commit_after>
|
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('next') or data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
|
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
Handle both forms of next links
#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('next') or data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
|
<commit_before>#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
<commit_msg>Handle both forms of next links<commit_after>#!/usr/bin/env python3
from urllib.parse import urljoin
from urllib.request import urlopen, Request
import json
import codecs
reader = codecs.getreader("utf-8")
def crawl(collection_url):
while True:
req = Request(collection_url, headers={'accept': 'application/json'})
data = json.load(reader(urlopen(req)))
for item in data['items']:
yield item
next_page = data.get('next') or data.get('nextPage')
if next_page:
collection_url = urljoin(collection_url, next_page['@id'])
else:
break
if __name__ == '__main__':
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('url')
args = ap.parse_args()
for item in crawl(args.url):
print(item)
|
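The crawler fix accepts two pagination vocabularies, 'next' and 'nextPage', with a single `or`. If more variants appear, a small helper that probes an ordered list of candidate keys keeps crawl() readable; the key list below is an assumption, not something the record prescribes:

def find_next_link(data, keys=('next', 'nextPage')):
    """Return the first pagination link present in data, or None.

    keys is ordered, so earlier names win when several are present.
    """
    for key in keys:
        link = data.get(key)
        if link:
            return link
    return None

# Inside crawl(), the branch then becomes:
#     next_page = find_next_link(data)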
e249e1c03fab60c2f09a171924f3a1f701a0c7aa
|
astropy/tests/image_tests.py
|
astropy/tests/image_tests.py
|
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x')
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
Add back mirror for image tests
|
Add back mirror for image tests
|
Python
|
bsd-3-clause
|
pllim/astropy,stargaser/astropy,MSeifert04/astropy,funbaker/astropy,larrybradley/astropy,astropy/astropy,pllim/astropy,lpsinger/astropy,saimn/astropy,MSeifert04/astropy,mhvk/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,bsipocz/astropy,astropy/astropy,lpsinger/astropy,bsipocz/astropy,DougBurke/astropy,saimn/astropy,aleksandr-bakanov/astropy,pllim/astropy,bsipocz/astropy,bsipocz/astropy,funbaker/astropy,MSeifert04/astropy,funbaker/astropy,funbaker/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,astropy/astropy,mhvk/astropy,larrybradley/astropy,pllim/astropy,stargaser/astropy,stargaser/astropy,mhvk/astropy,DougBurke/astropy,astropy/astropy,saimn/astropy,lpsinger/astropy,lpsinger/astropy,dhomeier/astropy,saimn/astropy,mhvk/astropy,DougBurke/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,saimn/astropy,larrybradley/astropy,dhomeier/astropy,DougBurke/astropy,stargaser/astropy,dhomeier/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,astropy/astropy,mhvk/astropy,larrybradley/astropy,StuartLittlefair/astropy,lpsinger/astropy,MSeifert04/astropy,pllim/astropy
|
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x')
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
Add back mirror for image tests
|
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
<commit_before>import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x')
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
<commit_msg>Add back mirror for image tests<commit_after>
|
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x')
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
Add back mirror for image tests
import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
<commit_before>import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x')
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
<commit_msg>Add back mirror for image tests<commit_after>import matplotlib
from matplotlib import pyplot as plt
from ..utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
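The astropy change turns IMAGE_REFERENCE_DIR into a comma-separated pair of baseline locations, which the surrounding test setup evidently treats as an ordered list of servers to try (primary first, mirror as fallback). Building the string by formatting each mirror once and joining scales the same idea to any number of mirrors; a sketch:

import matplotlib

MPL_VERSION = matplotlib.__version__

ROOT = "http://{server}/testing/astropy/2018-02-01T23:31:45.013149/{mpl_version}/"
MIRRORS = ['data.astropy.org', 'www.astropy.org/astropy-data']

# One entry per mirror, comma-separated, primary server first.
IMAGE_REFERENCE_DIR = ','.join(
    ROOT.format(server=server, mpl_version=MPL_VERSION[:3] + '.x')
    for server in MIRRORS)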
652fceb8f7d22c6cb22a81e4ce048ff8edd34e8b
|
migrations/versions/148_add_last_modified_column_for_events.py
|
migrations/versions/148_add_last_modified_column_for_events.py
|
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 41f957b595fc
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '41f957b595fc'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
|
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 486c7fa5b533
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '486c7fa5b533'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
|
Undo revision changes to previous migration
|
Undo revision changes to previous migration
|
Python
|
agpl-3.0
|
Eagles2F/sync-engine,gale320/sync-engine,closeio/nylas,ErinCall/sync-engine,gale320/sync-engine,jobscore/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,nylas/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,closeio/nylas,nylas/sync-engine,Eagles2F/sync-engine,closeio/nylas,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,jobscore/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine
|
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 41f957b595fc
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '41f957b595fc'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
Undo revision changes to previous migration
|
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 486c7fa5b533
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '486c7fa5b533'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
|
<commit_before>"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 41f957b595fc
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '41f957b595fc'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
<commit_msg>Undo revision changes to previous migration<commit_after>
|
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 486c7fa5b533
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '486c7fa5b533'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
|
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 41f957b595fc
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '41f957b595fc'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
Undo revision changes to previous migration
"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 486c7fa5b533
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '486c7fa5b533'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
|
<commit_before>"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 41f957b595fc
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '41f957b595fc'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
<commit_msg>Undo revision changes to previous migration<commit_after>"""add last_modified column for events
Revision ID: 54dcea22a268
Revises: 486c7fa5b533
Create Date: 2015-03-16 23:15:55.908307
"""
# revision identifiers, used by Alembic.
revision = '54dcea22a268'
down_revision = '486c7fa5b533'
from alembic import op
from sqlalchemy.sql import text
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event ADD COLUMN last_modified DATETIME"))
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
conn.execute(text("ALTER TABLE event DROP COLUMN last_modified"))
|
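Alembic migrations form a linked list through down_revision, so the sync-engine commit simply re-points this script at its true parent, 486c7fa5b533, undoing an earlier renumbering. After hand-edits like this it is worth walking the chain to confirm it still reaches the base; a sketch using the standard ScriptDirectory API (the ini path is an assumption about the project layout):

from alembic.config import Config
from alembic.script import ScriptDirectory

def print_revision_chain(ini_path='alembic.ini'):
    """Walk migrations from head to base, printing each link."""
    script = ScriptDirectory.from_config(Config(ini_path))
    for rev in script.walk_revisions():  # head -> base
        print(rev.revision, '<-', rev.down_revision)

# A dangling down_revision surfaces here as a resolution error or as
# a walk that never ends at a script whose down_revision is None.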
926ee23ef946dc2eba0cae5321601c5fadad9e5e
|
examples/faceted_lineplot.py
|
examples/faceted_lineplot.py
|
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Set up the FacetGrid with independent x axes
g = sns.FacetGrid(dots, col="align",
sharex=False, size=5, aspect=.75)
# Draw the lineplot on each facet
g.map_dataframe(sns.lineplot, "time", "firing_rate",
hue="coherence", size="choice",
size_order=["T1", "T2"],
palette=palette)
g.add_legend()
|
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Plot the lines on two facets
sns.relplot(x="time", y="firing_rate",
hue="coherence", size="choice", col="align",
size_order=["T1", "T2"], palette=palette,
height=5, aspect=.75, facet_kws=dict(sharex=False),
kind="line", legend="full", data=dots)
|
Update dots lineplot example to use relplot
|
Update dots lineplot example to use relplot
|
Python
|
bsd-3-clause
|
sauliusl/seaborn,lukauskas/seaborn,petebachant/seaborn,phobson/seaborn,anntzer/seaborn,lukauskas/seaborn,mwaskom/seaborn,phobson/seaborn,arokem/seaborn,anntzer/seaborn,arokem/seaborn,mwaskom/seaborn
|
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Set up the FacetGrid with independent x axes
g = sns.FacetGrid(dots, col="align",
sharex=False, size=5, aspect=.75)
# Draw the lineplot on each facet
g.map_dataframe(sns.lineplot, "time", "firing_rate",
hue="coherence", size="choice",
size_order=["T1", "T2"],
palette=palette)
g.add_legend()
Update dots lineplot example to use relplot
|
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Plot the lines on two facets
sns.relplot(x="time", y="firing_rate",
hue="coherence", size="choice", col="align",
size_order=["T1", "T2"], palette=palette,
height=5, aspect=.75, facet_kws=dict(sharex=False),
kind="line", legend="full", data=dots)
|
<commit_before>"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Set up the FacetGrid with independent x axes
g = sns.FacetGrid(dots, col="align",
sharex=False, size=5, aspect=.75)
# Draw the lineplot on each facet
g.map_dataframe(sns.lineplot, "time", "firing_rate",
hue="coherence", size="choice",
size_order=["T1", "T2"],
palette=palette)
g.add_legend()
<commit_msg>Update dots lineplot example to use relplot<commit_after>
|
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Plot the lines on two facets
sns.relplot(x="time", y="firing_rate",
hue="coherence", size="choice", col="align",
size_order=["T1", "T2"], palette=palette,
height=5, aspect=.75, facet_kws=dict(sharex=False),
kind="line", legend="full", data=dots)
|
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Set up the FacetGrid with independent x axes
g = sns.FacetGrid(dots, col="align",
sharex=False, size=5, aspect=.75)
# Draw the lineplot on each facet
g.map_dataframe(sns.lineplot, "time", "firing_rate",
hue="coherence", size="choice",
size_order=["T1", "T2"],
palette=palette)
g.add_legend()
Update dots lineplot example to use relplot
"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Plot the lines on two facets
sns.relplot(x="time", y="firing_rate",
hue="coherence", size="choice", col="align",
size_order=["T1", "T2"], palette=palette,
height=5, aspect=.75, facet_kws=dict(sharex=False),
kind="line", legend="full", data=dots)
|
<commit_before>"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Set up the FacetGrid with independent x axes
g = sns.FacetGrid(dots, col="align",
sharex=False, size=5, aspect=.75)
# Draw the lineplot on each facet
g.map_dataframe(sns.lineplot, "time", "firing_rate",
hue="coherence", size="choice",
size_order=["T1", "T2"],
palette=palette)
g.add_legend()
<commit_msg>Update dots lineplot example to use relplot<commit_after>"""
Line plots on multiple facets
=============================
_thumb: .45, .42
"""
import seaborn as sns
sns.set(style="ticks")
dots = sns.load_dataset("dots")
# Define a palette to ensure that colors will be
# shared across the facets
palette = dict(zip(dots.coherence.unique(),
sns.color_palette("rocket_r", 6)))
# Plot the lines on two facets
sns.relplot(x="time", y="firing_rate",
hue="coherence", size="choice", col="align",
size_order=["T1", "T2"], palette=palette,
height=5, aspect=.75, facet_kws=dict(sharex=False),
kind="line", legend="full", data=dots)
|
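The seaborn example swaps a hand-built FacetGrid plus map_dataframe for the figure-level relplot, which routes faceting (col), semantic mappings (hue, size) and grid geometry (height, aspect, facet_kws) through one call. Switching the representation only means changing kind; a sketch of the scatter variant on the same dataset:

import seaborn as sns

sns.set(style="ticks")
dots = sns.load_dataset("dots")

# kind="scatter" is relplot's default; the faceting and mapping
# arguments are identical to the kind="line" call above.
sns.relplot(x="time", y="firing_rate",
            hue="coherence", col="align",
            height=5, aspect=.75,
            kind="scatter", data=dots)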
29fef644079a03fe0cfeb792dd47af7749382dba
|
unnaturalcode/http/__main__.py
|
unnaturalcode/http/__main__.py
|
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
|
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
from unnaturalcode.http import unnaturalhttp
except ImportError:
import sys, os
# Oiugh.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
|
Fix to allow invocation by `python unnaturalcode/http`
|
Fix to allow invocation by `python unnaturalcode/http`
|
Python
|
agpl-3.0
|
orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/estimate-charm,naturalness/unnaturalcode,orezpraw/unnaturalcode
|
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
Fix to allow invocation by `python unnaturalcode/http`
|
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
from unnaturalcode.http import unnaturalhttp
except ImportError:
import sys, os
# Oiugh.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
|
<commit_before>#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
<commit_msg>Fix to allow invocation by `python unnaturalcode/http`<commit_after>
|
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
from unnaturalcode.http import unnaturalhttp
except ImportError:
import sys, os
# Oiugh.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
|
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
Fix to allow invocation by `python unnaturalcode/http`
#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
from unnaturalcode.http import unnaturalhttp
except ImportError:
import sys, os
# Oiugh.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
|
<commit_before>#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
<commit_msg>Fix to allow invocation by `python unnaturalcode/http`<commit_after>#!/usr/bin/env python
# Copyright (C) 2014 Eddie Antonio Santos
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
from unnaturalcode.http import unnaturalhttp
except ImportError:
import sys, os
    # Oiugh. Fall back to adding the repository root to sys.path so the package resolves.
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from unnaturalcode.http import unnaturalhttp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(unnaturalhttp)
app.run(host='0.0.0.0')
|
6bdad4defc78f984a293fe9e6e60eb33cb1aa541
|
espei/citing.py
|
espei/citing.py
|
"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1–10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
|
"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
|
Fix unicode in citation (again)
|
DOC: Fix unicode in citation (again)
|
Python
|
mit
|
PhasesResearchLab/ESPEI
|
"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1–10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
DOC: Fix unicode in citation (again)
|
"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
|
<commit_before>"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1–10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
<commit_msg>DOC: Fix unicode in citation (again)<commit_after>
|
"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
|
"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1–10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
DOC: Fix unicode in citation (again)"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
|
<commit_before>"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1–10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
<commit_msg>DOC: Fix unicode in citation (again)<commit_after>"""
Define citations for ESPEI
"""
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu–Mg}},
year = {2019}
}
"""
|
36e37a5409ef7fce9286f5fa9c24a185592df59a
|
health_check/contrib/celery/backends.py
|
health_check/contrib/celery/backends.py
|
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
add.forget()
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
Revert "Clean results task Health Check"
|
Revert "Clean results task Health Check"
This reverts commit 4d4148ea831d425327a3047ebb9be8c3129eaff6.
Close #269
|
Python
|
mit
|
KristianOellegaard/django-health-check,KristianOellegaard/django-health-check
|
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
add.forget()
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
Revert "Clean results task Health Check"
This reverts commit 4d4148ea831d425327a3047ebb9be8c3129eaff6.
Close #269
|
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
<commit_before>from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
add.forget()
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
<commit_msg>Revert "Clean results task Health Check"
This reverts commit 4d4148ea831d425327a3047ebb9be8c3129eaff6.
Close #269<commit_after>
|
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
add.forget()
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
Revert "Clean results task Health Check"
This reverts commit 4d4148ea831d425327a3047ebb9be8c3129eaff6.
Close #269
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
<commit_before>from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
add.forget()
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
<commit_msg>Revert "Clean results task Health Check"
This reverts commit 4d4148ea831d425327a3047ebb9be8c3129eaff6.
Close #269<commit_after>from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timeout,
queue=self.queue
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except NotImplementedError as e:
self.add_error(ServiceUnavailable("NotImplementedError: Make sure CELERY_RESULT_BACKEND is set"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
7229e9f43a94ab9336ef1dc2fe27a14fc6662a8b
|
knights/base.py
|
knights/base.py
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
'filters': self.parser.filters,
'tags': self.parser.tags,
}
return ''.join(eval(self.code, global_ctx, {}))
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
ctx = dict(context, _filter=self.parser.filters, _tag=self.parser.tags)
global_ctx = {
'nodelist': self.nodelist,
'context': ctx,
}
return ''.join(eval(self.code, global_ctx, {}))
|
Add tags and filters into the context
|
Add tags and filters into the context
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
'filters': self.parser.filters,
'tags': self.parser.tags,
}
return ''.join(eval(self.code, global_ctx, {}))
Add tags and filters into the context
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
ctx = dict(context, _filter=self.parser.filters, _tag=self.parser.tags)
global_ctx = {
'nodelist': self.nodelist,
'context': ctx,
}
return ''.join(eval(self.code, global_ctx, {}))
|
<commit_before>
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
'filters': self.parser.filters,
'tags': self.parser.tags,
}
return ''.join(eval(self.code, global_ctx, {}))
<commit_msg>Add tags and filters into the context<commit_after>
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
ctx = dict(context, _filter=self.parser.filters, _tag=self.parser.tags)
global_ctx = {
'nodelist': self.nodelist,
'context': ctx,
}
return ''.join(eval(self.code, global_ctx, {}))
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
'filters': self.parser.filters,
'tags': self.parser.tags,
}
return ''.join(eval(self.code, global_ctx, {}))
Add tags and filters into the context
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
ctx = dict(context, _filter=self.parser.filters, _tag=self.parser.tags)
global_ctx = {
'nodelist': self.nodelist,
'context': ctx,
}
return ''.join(eval(self.code, global_ctx, {}))
|
<commit_before>
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
'filters': self.parser.filters,
'tags': self.parser.tags,
}
return ''.join(eval(self.code, global_ctx, {}))
<commit_msg>Add tags and filters into the context<commit_after>
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
ctx = dict(context, _filter=self.parser.filters, _tag=self.parser.tags)
global_ctx = {
'nodelist': self.nodelist,
'context': ctx,
}
return ''.join(eval(self.code, global_ctx, {}))
|
812d3cdace821a77fdcb2e0441ba5fa2650bf5fd
|
pybo/bayesopt/utils.py
|
pybo/bayesopt/utils.py
|
"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""Internal decorator to perform the annotation."""
f._params = set(args)
return f
return decorator
|
"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import inspect
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""
Internal decorator to perform the annotation.
"""
spec = inspect.getargspec(f)
params_valid = set(spec.args[::-1][:len(spec.defaults)])
params = set(args)
# make sure we're exposing valid parameters which are actually kwargs
# in the decorated function.
if not params.issubset(params_valid):
raise ValueError('exposed parameters are not valid kwargs: %r'
% list(params - params_valid))
# make sure we're not trying to expose rng.
if 'rng' in params:
raise ValueError("'rng' is a special parameter that "
"shouldn't be exposed")
f._params = args
return f
return decorator
|
Update params decorator with basic error checking.
|
Update params decorator with basic error checking.
|
Python
|
bsd-2-clause
|
mwhoffman/pybo,jhartford/pybo
|
"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""Internal decorator to perform the annotation."""
f._params = set(args)
return f
return decorator
Update params decorator with basic error checking.
|
"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import inspect
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""
Internal decorator to perform the annotation.
"""
spec = inspect.getargspec(f)
params_valid = set(spec.args[::-1][:len(spec.defaults)])
params = set(args)
# make sure we're exposing valid parameters which are actually kwargs
# in the decorated function.
if not params.issubset(params_valid):
raise ValueError('exposed parameters are not valid kwargs: %r'
% list(params - params_valid))
# make sure we're not trying to expose rng.
if 'rng' in params:
raise ValueError("'rng' is a special parameter that "
"shouldn't be exposed")
f._params = args
return f
return decorator
|
<commit_before>"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""Internal decorator to perform the annotation."""
f._params = set(args)
return f
return decorator
<commit_msg>Update params decorator with basic error checking.<commit_after>
|
"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import inspect
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""
Internal decorator to perform the annotation.
"""
spec = inspect.getargspec(f)
params_valid = set(spec.args[::-1][:len(spec.defaults)])
params = set(args)
# make sure we're exposing valid parameters which are actually kwargs
# in the decorated function.
if not params.issubset(params_valid):
raise ValueError('exposed parameters are not valid kwargs: %r'
% list(params - params_valid))
# make sure we're not trying to expose rng.
if 'rng' in params:
raise ValueError("'rng' is a special parameter that "
"shouldn't be exposed")
f._params = args
return f
return decorator
|
"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""Internal decorator to perform the annotation."""
f._params = set(args)
return f
return decorator
Update params decorator with basic error checking."""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import inspect
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""
Internal decorator to perform the annotation.
"""
spec = inspect.getargspec(f)
params_valid = set(spec.args[::-1][:len(spec.defaults)])
params = set(args)
# make sure we're exposing valid parameters which are actually kwargs
# in the decorated function.
if not params.issubset(params_valid):
raise ValueError('exposed parameters are not valid kwargs: %r'
% list(params - params_valid))
# make sure we're not trying to expose rng.
if 'rng' in params:
raise ValueError("'rng' is a special parameter that "
"shouldn't be exposed")
f._params = args
return f
return decorator
|
<commit_before>"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""Internal decorator to perform the annotation."""
f._params = set(args)
return f
return decorator
<commit_msg>Update params decorator with basic error checking.<commit_after>"""
Simple utilities for creating Bayesian optimization components.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import inspect
# exported symbols
__all__ = ['params']
def params(*args):
"""
Decorator for annotating a BO component with the parameters that can be
modified by the user.
"""
def decorator(f):
"""
Internal decorator to perform the annotation.
"""
spec = inspect.getargspec(f)
params_valid = set(spec.args[::-1][:len(spec.defaults)])
params = set(args)
# make sure we're exposing valid parameters which are actually kwargs
# in the decorated function.
if not params.issubset(params_valid):
raise ValueError('exposed parameters are not valid kwargs: %r'
% list(params - params_valid))
# make sure we're not trying to expose rng.
if 'rng' in params:
raise ValueError("'rng' is a special parameter that "
"shouldn't be exposed")
f._params = args
return f
return decorator
|
03201b992adfada04e4104611ee27b125c157eeb
|
apps/local_apps/account/context_processors.py
|
apps/local_apps/account/context_processors.py
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except Account.DoesNotExist:
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
Throw 500 error on multiple accounts in account context processor
|
Throw 500 error on multiple accounts in account context processor
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1121 45601e1e-1555-4799-bd40-45c8c71eef50
|
Python
|
mit
|
amarandon/pinax,alex/pinax,amarandon/pinax,amarandon/pinax,amarandon/pinax,alex/pinax,alex/pinax
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
Throw 500 error on multiple accounts in account context processor
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1121 45601e1e-1555-4799-bd40-45c8c71eef50
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except Account.DoesNotExist:
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
<commit_before>
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
<commit_msg>Throw 500 error on multiple accounts in account context processor
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1121 45601e1e-1555-4799-bd40-45c8c71eef50<commit_after>
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except Account.DoesNotExist:
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
Throw 500 error on multiple accounts in account context processor
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1121 45601e1e-1555-4799-bd40-45c8c71eef50
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except Account.DoesNotExist:
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
<commit_before>
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except (Account.DoesNotExist, Account.MultipleObjectsReturned):
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
<commit_msg>Throw 500 error on multiple accounts in account context processor
git-svn-id: 51ba99f60490c2ee9ba726ccda75a38950f5105d@1121 45601e1e-1555-4799-bd40-45c8c71eef50<commit_after>
from account.models import Account, AnonymousAccount
def openid(request):
return {'openid': request.openid}
def account(request):
if request.user.is_authenticated():
try:
account = Account._default_manager.get(user=request.user)
except Account.DoesNotExist:
account = AnonymousAccount(request)
else:
account = AnonymousAccount(request)
return {'account': account}
|
1c1d915596f5700a08efb6e5906a4ccfa0ddb932
|
tests/registryd/test_registry_startup.py
|
tests/registryd/test_registry_startup.py
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
Test that the registry's root has a null parent
|
Test that the registry's root has a null parent
|
Python
|
lgpl-2.1
|
GNOME/at-spi2-core,GNOME/at-spi2-core,GNOME/at-spi2-core
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
Test that the registry's root has a null parent
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
<commit_before># Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
<commit_msg>Test that the registry's root has a null parent<commit_after>
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
Test that the registry's root has a null parent
# Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
<commit_before># Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
<commit_msg>Test that the registry's root has a null parent<commit_after># Pytest will pick up this module automatically when running just "pytest".
#
# Each test_*() function gets passed test fixtures, which are defined
# in conftest.py. So, a function "def test_foo(bar)" will get a bar()
# fixture created for it.
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'
ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'
def get_property(proxy, iface_name, prop_name):
return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)
def test_accessible_iface_properties(registry, session_manager):
values = [
('Name', 'main'),
('Description', ''),
]
for prop_name, expected in values:
assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
def test_registry_root_has_null_parent(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'Parent') == ('', '/org/a11y/atspi/null')
def test_empty_registry_has_zero_children(registry, session_manager):
assert get_property(registry, ACCESSIBLE_IFACE, 'ChildCount') == 0
|
ea2d16c78eff88ba4a32a89793a7cd644e20cdb3
|
tools/perf/benchmarks/draw_properties.py
|
tools/perf/benchmarks/draw_properties.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference')
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference','win') # http://crbug.com/463111
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
|
Disable the "draw properties" benchmark
|
Disable the "draw properties" benchmark
We'd still like to be able to run this benchmark manually, but we don't
need it to be run automatically.
BUG=None
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect;tryserver.chromium.perf:mac_perf_bisect;tryserver.chromium.perf:win_perf_bisect;tryserver.chromium.perf:android_nexus5_perf_bisect
Review URL: https://codereview.chromium.org/1202383004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336012}
|
Python
|
bsd-3-clause
|
Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,chuan9/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference')
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference','win') # http://crbug.com/463111
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
Disable the "draw properties" benchmark
We'd still like to be able to run this benchmark manually, but we don't
need it to be run automatically.
BUG=None
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect;tryserver.chromium.perf:mac_perf_bisect;tryserver.chromium.perf:win_perf_bisect;tryserver.chromium.perf:android_nexus5_perf_bisect
Review URL: https://codereview.chromium.org/1202383004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336012}
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference')
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference','win') # http://crbug.com/463111
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
<commit_msg>Disable the "draw properties" benchmark
We'd still like to be able to run this benchmark manually, but we don't
need it to be run automatically.
BUG=None
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect;tryserver.chromium.perf:mac_perf_bisect;tryserver.chromium.perf:win_perf_bisect;tryserver.chromium.perf:android_nexus5_perf_bisect
Review URL: https://codereview.chromium.org/1202383004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336012}<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference')
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference','win') # http://crbug.com/463111
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
Disable the "draw properties" benchmark
We'd still like to be able to run this benchmark manually, but we don't
need it to be run automatically.
BUG=None
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect;tryserver.chromium.perf:mac_perf_bisect;tryserver.chromium.perf:win_perf_bisect;tryserver.chromium.perf:android_nexus5_perf_bisect
Review URL: https://codereview.chromium.org/1202383004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336012}
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference')
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
@benchmark.Disabled('reference','win') # http://crbug.com/463111
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
<commit_msg>Disable the "draw properties" benchmark
We'd still like to be able to run this benchmark manually, but we don't
need it to be run automatically.
BUG=None
CQ_EXTRA_TRYBOTS=tryserver.chromium.perf:linux_perf_bisect;tryserver.chromium.perf:mac_perf_bisect;tryserver.chromium.perf:win_perf_bisect;tryserver.chromium.perf:android_nexus5_perf_bisect
Review URL: https://codereview.chromium.org/1202383004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#336012}<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from measurements import draw_properties
from telemetry import benchmark
import page_sets
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesToughScrolling(perf_benchmark.PerfBenchmark):
test = draw_properties.DrawProperties
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'draw_properties.tough_scrolling'
# This benchmark depends on tracing categories available in M43
# This benchmark is still useful for manual testing, but need not be enabled
# and run regularly.
@benchmark.Disabled()
class DrawPropertiesTop25(perf_benchmark.PerfBenchmark):
"""Measures the performance of computing draw properties from property trees.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = draw_properties.DrawProperties
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'draw_properties.top_25'
|
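A note on the pattern in the record above: @benchmark.Disabled() with no arguments turns a benchmark off everywhere, whereas the earlier @benchmark.Disabled('reference', 'win') form only skipped specific configurations. The sketch below is a hypothetical reimplementation of that decorator idea for illustration only, not Telemetry's actual code; every name in it is made up.
def disabled(*platforms):
    """Attach skip metadata to a benchmark class; no args means skip everywhere."""
    def decorator(cls):
        cls._disabled_platforms = set(platforms) if platforms else {'all'}
        return cls
    return decorator
@disabled()
class ExampleBenchmark(object):
    pass
def should_run(benchmark_cls, current_platform):
    skipped = getattr(benchmark_cls, '_disabled_platforms', set())
    return 'all' not in skipped and current_platform not in skipped
assert not should_run(ExampleBenchmark, 'linux')  # disabled on every platform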
e8d7a81f74566775aa243a2441939f778b5c266d
|
frigg/builds/serializers.py
|
frigg/builds/serializers.py
|
from rest_framework import serializers
from .models import Build, BuildResult, Project
class ProjectSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
|
from rest_framework import serializers
from .models import Build, BuildResult, Project
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result'
)
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository',
'builds'
)
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
|
Add builds to project api
|
Add builds to project api
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
from rest_framework import serializers
from .models import Build, BuildResult, Project
class ProjectSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
Add builds to project api
|
from rest_framework import serializers
from .models import Build, BuildResult, Project
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result'
)
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository',
'builds'
)
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
|
<commit_before>from rest_framework import serializers
from .models import Build, BuildResult, Project
class ProjectSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
<commit_msg>Add builds to project api<commit_after>
|
from rest_framework import serializers
from .models import Build, BuildResult, Project
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result'
)
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository',
'builds'
)
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
|
from rest_framework import serializers
from .models import Build, BuildResult, Project
class ProjectSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
Add builds to project api
from rest_framework import serializers
from .models import Build, BuildResult, Project
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result'
)
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository',
'builds'
)
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
|
<commit_before>from rest_framework import serializers
from .models import Build, BuildResult, Project
class ProjectSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
<commit_msg>Add builds to project api<commit_after>from rest_framework import serializers
from .models import Build, BuildResult, Project
class BuildResultSerializer(serializers.ModelSerializer):
class Meta:
model = BuildResult
fields = (
'id',
'coverage',
'succeeded',
'tasks',
)
class BuildInlineSerializer(serializers.ModelSerializer):
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'build_number',
'branch',
'sha',
'pull_request_id',
'start_time',
'end_time',
'result'
)
class ProjectSerializer(serializers.ModelSerializer):
builds = BuildInlineSerializer(read_only=True, many=True)
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository',
'builds'
)
class ProjectInlineSerializer(serializers.ModelSerializer):
class Meta:
model = Project
fields = (
'id',
'owner',
'name',
'private',
'approved',
'git_repository'
)
class BuildSerializer(serializers.ModelSerializer):
project = ProjectInlineSerializer(read_only=True)
result = BuildResultSerializer(read_only=True)
class Meta:
model = Build
fields = (
'id',
'project',
'result',
'build_number',
'branch',
'pull_request_id',
'sha',
'start_time',
'end_time',
)
|
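The split into ProjectSerializer and ProjectInlineSerializer in the commit above is what keeps the nesting acyclic: a project embeds its builds, but a build embeds only the inline project without builds. Assuming made-up field values, the resulting JSON would look roughly like the sketch below; it is plain dict literals, so it runs without Django or DRF installed.
project_payload = {
    "id": 1,
    "owner": "frigg",
    "name": "frigg-hq",
    "private": False,
    "approved": True,
    "git_repository": "git@github.com:frigg/frigg-hq.git",
    "builds": [
        {
            "id": 10,
            "build_number": 42,
            "branch": "master",
            "sha": "0" * 40,  # placeholder sha
            "pull_request_id": None,
            "start_time": "2015-01-01T00:00:00Z",
            "end_time": "2015-01-01T00:05:00Z",
            "result": {"id": 7, "coverage": 98.5, "succeeded": True, "tasks": "[]"},
        },
    ],
}
# Note the embedded build carries no "project" key: nesting the full
# ProjectSerializer there would recurse project -> builds -> project.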
8200beb4aa68a3e88a95394d2b8146ce264e1055
|
flask_authorization_panda/tests/test_basic_auth.py
|
flask_authorization_panda/tests/test_basic_auth.py
|
import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
|
from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
Add unit test for successful completion.
|
Add unit test for successful completion.
|
Python
|
mit
|
eikonomega/flask-authorization-panda
|
import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
Add unit test for successful completion.
|
from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
<commit_before>import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
<commit_msg>Add unit test for successful completion.<commit_after>
|
from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
Add unit test for successful completion.
from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
<commit_before>import json
from base64 import b64encode
import pytest
from flask import Flask
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return 'Hello World!'
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
<commit_msg>Add unit test for successful completion.<commit_after>from base64 import b64encode
import pytest
from flask import Flask, jsonify
from flask_authorization_panda import basic_auth
@pytest.fixture
def flask_app():
app = Flask(__name__)
app.config['TESTING'] = True
@app.route('/')
@basic_auth
def hello_world():
return jsonify({"statusCode": 200, "message": "Ok"})
return app
def test_no_credentials_in_application_config(flask_app):
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '500' in response.data
def test_no_credentials_in_request(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/')
assert "HTTP Basic Auth required for this URL" in response.data
def test_basic_auth(flask_app):
flask_app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
response = flask_app.test_client().get('/', headers={
'Authorization': 'Basic {}'.format(b64encode('admin:secret'))})
assert '200' in response.data
|
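One caveat about the tests above: they pass a str to b64encode, which only works on Python 2. A hedged Python 3 port of the header construction might look like the helper below; the function name is hypothetical and not part of flask-authorization-panda.
from base64 import b64encode
def basic_auth_header(username, password):
    # b64encode takes bytes on Python 3, so encode first and decode the result.
    token = b64encode('{0}:{1}'.format(username, password).encode('utf-8'))
    return {'Authorization': 'Basic {0}'.format(token.decode('ascii'))}
assert basic_auth_header('admin', 'secret') == {
    'Authorization': 'Basic YWRtaW46c2VjcmV0'}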
aa7f1acbfa6f0f776623a4b3b387b7c38282d1de
|
genes/docker/main.py
|
genes/docker/main.py
|
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS = 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
|
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS == 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
|
Change assignment to boolean equals
|
Change assignment to boolean equals
|
Python
|
mit
|
hatchery/genepool,hatchery/Genepool2
|
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS = 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
Change assignment to boolean equals
|
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS == 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
|
<commit_before>from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS = 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
<commit_msg>Change assignment to boolean equals<commit_after>
|
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS == 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
|
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS = 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
Change assignment to boolean equals
from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS == 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
|
<commit_before>from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS = 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
<commit_msg>Change assignment to boolean equals<commit_after>from genes import apt
import platform
class Config:
OS = platform.system()
(DIST, _, CODE) = platform.linux_distribution()
REPO = DIST.lower() + '-' + CODE
def main():
if Config.OS == 'Linux':
if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':
apt.recv_key('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('docker.list', 'https://apt.dockerproject.org/repo', Config.REPO, 'main')
apt.update()
apt.install('docker-engine')
#FIXME: add compose, machine, etc
else:
#FIXME: print failure case
pass
elif Config.OS == 'Darwin':
#brew_cask.install('dockertoolbox')
pass
else:
#FIXME: print failure, handle osx/windows
pass
|
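The bug fixed above is worth spelling out: elif Config.OS = 'Darwin': is a syntax error, not a runtime bug, so the original module failed at import time rather than silently misbehaving. A quick way to demonstrate that without importing the broken file (the snippet string here is illustrative):
# compile() surfaces the same SyntaxError the interpreter raises on
# import; Python has no assignment-in-condition (the := walrus operator
# only arrived in 3.8, and even it is spelled differently).
broken = "if os_name = 'Darwin':\n    pass\n"
try:
    compile(broken, '<example>', 'exec')
except SyntaxError as error:
    print('rejected as expected:', error.msg)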
f360b8ada2783636c1c77f47fa9b982581a3c944
|
lib/arguments.py
|
lib/arguments.py
|
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
parser.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
parser.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
|
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
options = parser.add_mutually_exclusive_group()
options.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
options.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
|
Add "-h" and "-v" to mutually exclusive group
|
[CIKit] Add "-h" and "-v" to mutually exclusive group
|
Python
|
apache-2.0
|
BR0kEN-/cikit,BR0kEN-/cikit,BR0kEN-/cikit,BR0kEN-/cikit,BR0kEN-/cikit
|
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
parser.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
parser.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
[CIKit] Add "-h" and "-v" to mutually exclusive group
|
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
options = parser.add_mutually_exclusive_group()
options.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
options.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
|
<commit_before>from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
parser.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
parser.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
<commit_msg>[CIKit] Add "-h" and "-v" to mutually exclusive group<commit_after>
|
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
options = parser.add_mutually_exclusive_group()
options.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
options.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
|
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
parser.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
parser.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
[CIKit] Add "-h" and "-v" to mutually exclusive group
from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
options = parser.add_mutually_exclusive_group()
options.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
options.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
|
<commit_before>from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
parser.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
parser.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
<commit_msg>[CIKit] Add "-h" and "-v" to mutually exclusive group<commit_after>from os import path
from actions import VersionAction
from argparse import ArgumentParser
from functions import parse_extra_vars
parser = ArgumentParser(
prog='cikit',
add_help=False,
)
parser.add_argument(
'playbook',
nargs='?',
default='',
help='The name of a playbook to run.',
)
options = parser.add_mutually_exclusive_group()
options.add_argument(
'-h',
action='help',
help='Show this help message and exit.',
)
options.add_argument(
'-v',
dest='%s/.version' % path.realpath(__file__ + '/..'),
action=VersionAction,
default='1.0.0',
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Run CIKit without passing the control to Ansible.',
)
parser.add_argument(
'--limit',
metavar='HOST',
nargs='?',
help=(
'The host to run a playbook at. The value of this option must '
'be an alias of a host from the "%%s/.cikit/inventory" file.'
),
)
args, argv = parser.parse_known_args()
args.extra = {}
parse_extra_vars(argv, args.extra)
# Duplicate the "limit" option as "extra" because some playbooks may
# require it and required options are checked within the "extra" only.
if args.limit:
args.extra['limit'] = args.limit
del argv
|
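For reference, the effect of moving "-h" and "-v" into a mutually exclusive group is that argparse itself rejects combined usage. A minimal standalone demonstration follows; the prog name is hypothetical and store_true stands in for the real help/version actions.
from argparse import ArgumentParser
parser = ArgumentParser(prog='demo', add_help=False)
group = parser.add_mutually_exclusive_group()
group.add_argument('-h', action='store_true')
group.add_argument('-v', action='store_true')
parser.parse_args(['-v'])          # accepted
# parser.parse_args(['-h', '-v'])  # SystemExit:
#   "argument -v: not allowed with argument -h"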
e8942651a43c7af1375b42ddd6521b4e65169b95
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
|
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
description = """A Chip-8 VM implemented in C++"""
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest .")
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
|
Add description and test build stage
|
Add description and test build stage
|
Python
|
mit
|
benvenutti/core8,benvenutti/core8
|
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
Add description and test build stage
|
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
description = """A Chip-8 VM implemented in C++"""
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest .")
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
|
<commit_before>from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]<commit_msg>Add description and test build stage<commit_after>
|
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
description = """A Chip-8 VM implemented in C++"""
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest .")
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
|
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
Add description and test build stage
from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
description = """A Chip-8 VM implemented in C++"""
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest .")
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
|
<commit_before>from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]<commit_msg>Add description and test build stage<commit_after>from conans import ConanFile, CMake
class Core8(ConanFile):
name = "core8"
version = "0.1"
url = "https://github.com/benvenutti/core8.git"
description = """A Chip-8 VM implemented in C++"""
settings = "os", "compiler", "build_type", "arch"
license = "MIT"
exports_sources = "*"
def build(self):
cmake = CMake(self.settings)
self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest .")
def package(self):
self.copy("*.hpp", dst="include/core8", src="include/core8")
self.copy("*", dst="lib", src="lib")
def package_info(self):
self.cpp_info.libs = ["core8"]
|
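Running ctest inside build(), as the commit above does, means a failing unit test aborts the package build, since ConanFile.run propagates command failures. A rough standalone analogue of that behaviour is sketched below; this is a hypothetical helper, not Conan's real implementation.
import subprocess
def run(command):
    # Approximates ConanFile.run: raise on failure instead of ignoring it.
    returncode = subprocess.call(command, shell=True)
    if returncode != 0:
        raise RuntimeError('command failed ({0}): {1}'.format(returncode, command))
run('true')    # succeeds silently on POSIX shells
# run('false') # would raise RuntimeError and abort the build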
2f91ba989260c0723c9b02bd8d48805db637e350
|
dockci/migrations/0002.py
|
dockci/migrations/0002.py
|
"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
Handle no config.yaml in migrations
|
Handle no config.yaml in migrations
|
Python
|
isc
|
RickyCook/paas-in-a-day-dockci,RickyCook/paas-in-a-day-dockci
|
"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
Handle no config.yaml in migrations
|
"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
<commit_before>"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
<commit_msg>Handle no config.yaml in migrations<commit_after>
|
"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
Handle no config.yaml in migrations"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
<commit_before>"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
<commit_msg>Handle no config.yaml in migrations<commit_after>"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
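The try/except added above is the EAFP form of the fix; a look-before-you-leap sketch of the same migration reads as follows (standalone script, same data/configs.yaml layout; safe_load is a hedge for untrusted YAML and is not what the original called):

import os
import sys
import yaml

filename = os.path.join('data', 'configs.yaml')

# Fresh installs have no config yet, so there is nothing to migrate.
if not os.path.exists(filename):
    sys.exit(0)

with open(filename) as handle:
    data = yaml.safe_load(handle)

data['docker_hosts'] = [data.pop('docker_host')]

with open(filename, 'w') as handle:
    yaml.dump(data, handle, default_flow_style=False)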
df9fdb39f78cd001b6f420d7c54c64886b378483
|
project/wsgi.py
|
project/wsgi.py
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
application = get_wsgi_application()
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(get_wsgi_application())
|
Configure WhiteNoise to serve static files
|
Configure WhiteNoise to serve static files
|
Python
|
apache-2.0
|
SethGreylyn/gwells,rstens/gwells,SethGreylyn/gwells,SethGreylyn/gwells,SethGreylyn/gwells,rstens/gwells,rstens/gwells,bcgov/gwells,rstens/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
application = get_wsgi_application()
Configure WhiteNoise to serve static files
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(get_wsgi_application())
|
<commit_before>"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
application = get_wsgi_application()
<commit_msg>Configure WhiteNoise to serve static files<commit_after>
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(get_wsgi_application())
|
"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
application = get_wsgi_application()
Configure WhiteNoise to serve static files"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(get_wsgi_application())
|
<commit_before>"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
application = get_wsgi_application()
<commit_msg>Configure WhiteNoise to serve static files<commit_after>"""
WSGI config for project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(get_wsgi_application())
|
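DjangoWhiteNoise was the WhiteNoise 2.x/3.x entry point; later WhiteNoise releases dropped it in favour of wrapping the WSGI callable directly (or adding WhiteNoiseMiddleware in Django settings). A sketch of the wrapper form for comparison, assuming assets were collected into a staticfiles directory by collectstatic:

import os
from django.core.wsgi import get_wsgi_application
from whitenoise import WhiteNoise

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")

# Serve the collected static assets straight from the WSGI layer.
application = WhiteNoise(get_wsgi_application(), root="staticfiles")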
185950bfada928476f3a1ec4ec8479b9583e13ef
|
numba/tests/npyufunc/test_parallel_env_variable.py
|
numba/tests/npyufunc/test_parallel_env_variable.py
|
from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_DEFAULT_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
|
from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
|
Reset the num threads to the env variable, not the default
|
Reset the num threads to the env variable, not the default
|
Python
|
bsd-2-clause
|
stuartarchibald/numba,seibert/numba,cpcloud/numba,sklam/numba,seibert/numba,stonebig/numba,numba/numba,sklam/numba,cpcloud/numba,gmarkall/numba,gmarkall/numba,IntelLabs/numba,seibert/numba,seibert/numba,IntelLabs/numba,cpcloud/numba,sklam/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,numba/numba,stonebig/numba,stonebig/numba,stuartarchibald/numba,stonebig/numba,numba/numba,IntelLabs/numba,seibert/numba,numba/numba,gmarkall/numba,stonebig/numba,IntelLabs/numba,stuartarchibald/numba,sklam/numba,numba/numba,stuartarchibald/numba,sklam/numba,stuartarchibald/numba,cpcloud/numba,gmarkall/numba
|
from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_DEFAULT_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
Reset the num threads to the env variable, not the default
|
from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
|
<commit_before>from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_DEFAULT_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
<commit_msg>Reset the num threads to the env variable, not the default<commit_after>
|
from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
|
from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_DEFAULT_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
Reset the num threads to the env variable, not the defaultfrom numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
|
<commit_before>from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_DEFAULT_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
<commit_msg>Reset the num threads to the env variable, not the default<commit_after>from numba.np.ufunc.parallel import get_thread_count
from os import environ as env
from numba.core import config
import unittest
class TestParallelEnvVariable(unittest.TestCase):
"""
Tests environment variables related to the underlying "parallel"
functions for npyufuncs.
"""
_numba_parallel_test_ = False
def test_num_threads_variable(self):
"""
Tests the NUMBA_NUM_THREADS env variable behaves as expected.
"""
key = 'NUMBA_NUM_THREADS'
current = str(getattr(env, key, config.NUMBA_NUM_THREADS))
threads = "3154"
env[key] = threads
config.reload_config()
try:
self.assertEqual(threads, str(get_thread_count()))
self.assertEqual(threads, str(config.NUMBA_NUM_THREADS))
finally:
# reset the env variable/set to default
env[key] = current
config.reload_config()
if __name__ == '__main__':
unittest.main()
|
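The one-word change above matters because the finally block restores the pre-test state: falling back to NUMBA_DEFAULT_NUM_THREADS would clobber a NUMBA_NUM_THREADS value the user had exported before running the suite. For tests that only need a temporary override, the stdlib can do the snapshot-and-restore automatically; a generic sketch, independent of the numba suite:

import os
import unittest
from unittest import mock

class TestEnvRoundTrip(unittest.TestCase):
    def test_temporary_env_override(self):
        # patch.dict snapshots os.environ and restores it on exit,
        # covering both the previously-set and previously-unset cases.
        with mock.patch.dict(os.environ, {"NUMBA_NUM_THREADS": "3154"}):
            self.assertEqual(os.environ["NUMBA_NUM_THREADS"], "3154")
        # Outside the with-block the prior value (or absence) is back.

if __name__ == '__main__':
    unittest.main()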
b67b677d4092e5bec445649321b142d31cfc0fb6
|
linkatos/activities.py
|
linkatos/activities.py
|
from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list)
return (expecting_url, url_cache)
|
from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def remove_url_from(url_cache_list, index):
url_cache_list.pop(index)
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list, index)
return (expecting_url, url_cache_list)
|
Add function to remove reacted to urls
|
feat: Add function to remove reacted to urls
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list)
return (expecting_url, url_cache)
feat: Add function to remove reacted to urls
|
from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def remove_url_from(url_cache_list, index):
url_cache_list.pop(index)
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list, index)
return (expecting_url, url_cache_list)
|
<commit_before>from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list)
return (expecting_url, url_cache)
<commit_msg>feat: Add function to remove reacted to urls<commit_after>
|
from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def remove_url_from(url_cache_list, index):
url_cache_list.pop(index)
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list, index)
return (expecting_url, url_cache_list)
|
from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list)
return (expecting_url, url_cache)
feat: Add function to remove reacted to urlsfrom . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def remove_url_from(url_cache_list, index):
url_cache_list.pop(index)
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list, index)
return (expecting_url, url_cache_list)
|
<commit_before>from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list)
return (expecting_url, url_cache)
<commit_msg>feat: Add function to remove reacted to urls<commit_after>from . import parser
from . import printer
from . import firebase as fb
from . import reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_cache):
return url_cache is not None
def is_reaction(index):
return index is not None
def remove_url_from(url_cache_list, index):
url_cache_list.pop(index)
def event_consumer(expecting_url, url_cache_list, slack_client,
fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
if is_empty(events):
return (expecting_url, url_cache)
for event in events:
print(event)
if expecting_url and event['type'] == 'message':
new_url_cache = parser.parse_url_message(event)
url_cache_list.append(new_url_cache)
if is_url(new_url_cache):
printer.ask_confirmation(new_url_cache, slack_client)
if event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
index = react.is_confirmation(reaction['reaction'], url_cache_list,
                                           reaction['to_id'])
if is_reaction(index):
react.handle(reaction['reaction'], url_cache_list[index]['url'],
fb_credentials, firebase)
remove_url_from(url_cache_list, index)
return (expecting_url, url_cache_list)
|
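One wrinkle the diff above leaves in place: the early return still names url_cache, which no longer exists in this version of event_consumer, and pop(index) assumes the index is still in range. A self-contained sketch of the confirm-then-remove flow with those edges handled (the helper and field names here are hypothetical, not linkatos APIs):

def find_confirmed(url_cache_list, to_id):
    """Return the index of the cached URL a reaction refers to, or None."""
    for i, cache in enumerate(url_cache_list):
        if cache is not None and cache.get('id') == to_id:
            return i
    return None

def handle_confirmation(reaction, url_cache_list):
    index = find_confirmed(url_cache_list, reaction['to_id'])
    if index is None:
        return None                        # reaction matched no cached URL
    confirmed = url_cache_list.pop(index)  # remove exactly once, by index
    return confirmed['url']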
0c803c41fc7a54ddf0b8d1c580c39e7e2c325b8b
|
container/getPureElkIndex.py
|
container/getPureElkIndex.py
|
__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.indices.exists(index='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)
|
__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.exists(index='.kibana', doc_type='index-pattern',id='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)
|
Change the test for pureelk in elasticsearch
|
Change the test for pureelk in elasticsearch
It makes more sense to test for a specific index pattern rather than
pureelk-global-arrays index. The reason is if someone never adds an
array, startup code will try to re-add .kibana data
|
Python
|
apache-2.0
|
pureelk/pureelk,pureelk/pureelk,pureelk/pureelk,pureelk/pureelk
|
__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.indices.exists(index='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)Change the test for pureelk in elasticsearch
It makes more sense to test for a specific index pattern rather than
pureelk-global-arrays index. The reason is if someone never adds an
array, startup code will try to re-add .kibana data
|
__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.exists(index='.kibana', doc_type='index-pattern',id='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)
|
<commit_before>__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.indices.exists(index='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)<commit_msg>Change the test for pureelk in elasticsearch
It makes more sense to test for a specific index pattern rather than
pureelk-global-arrays index. The reason is if someone never adds an
array, startup code will try to re-add .kibana data<commit_after>
|
__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.exists(index='.kibana', doc_type='index-pattern',id='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)
|
__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.indices.exists(index='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)Change the test for pureelk in elasticsearch
It makes more sense to test for a specific index pattern rather than
pureelk-global-arrays index. The reason is if someone never adds an
array, startup code will try to re-add .kibana data__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.exists(index='.kibana', doc_type='index-pattern',id='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)
|
<commit_before>__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.indices.exists(index='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)<commit_msg>Change the test for pureelk in elasticsearch
It makes more sense to test for a specific index pattern rather than
pureelk-global-arrays index. The reason is if someone never adds an
array, startup code will try to re-add .kibana data<commit_after>__author__ = 'terry'
import sys
from elasticsearch import Elasticsearch
import time
if __name__ == '__main__':
time.sleep(5)
# create a connection to the Elasticsearch database
client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)
if client.exists(index='.kibana', doc_type='index-pattern',id='pureelk-global-arrays'):
sys.exit(0)
else:
sys.exit(1)
|
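The fixed time.sleep(5) above is a race on slow hosts: if Elasticsearch needs six seconds to come up, the probe exits 1 even though the data is intact. A sketch of the same check behind a bounded retry loop (index, doc_type and id are taken from the snippet, which targets an older elasticsearch-py that still accepts doc_type; the timings are arbitrary):

import sys
import time
from elasticsearch import Elasticsearch

client = Elasticsearch(['pureelk-elasticsearch:9200'], retry_on_timeout=True)

deadline = time.time() + 60           # give the cluster up to a minute
while time.time() < deadline:
    try:
        if client.exists(index='.kibana', doc_type='index-pattern',
                         id='pureelk-global-arrays'):
            sys.exit(0)               # index pattern already loaded
        break                         # reachable but not loaded: report 1
    except Exception:
        time.sleep(2)                 # not reachable yet, try again
sys.exit(1)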
5ae97ea5eb7e07c9e967741bac5871379b643b39
|
nova/db/base.py
|
nova/db/base.py
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
super(Base, self).__init__()
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
|
Add super call to db Base class
|
Add super call to db Base class
Without this call, multiple inheritance involving the db Base
class does not work correctly.
Change-Id: Iac6b99d34f00babb8b66fede4977bf75f0ed61d4
|
Python
|
apache-2.0
|
joker946/nova,alexandrucoman/vbox-nova-driver,felixma/nova,watonyweng/nova,devendermishrajio/nova,joker946/nova,Juniper/nova,NeCTAR-RC/nova,BeyondTheClouds/nova,redhat-openstack/nova,Yusuke1987/openstack_template,bgxavier/nova,ted-gould/nova,redhat-openstack/nova,phenoxim/nova,tudorvio/nova,jeffrey4l/nova,scripnichenko/nova,whitepages/nova,nikesh-mahalka/nova,berrange/nova,kimjaejoong/nova,klmitch/nova,leilihh/nova,ted-gould/nova,edulramirez/nova,Francis-Liu/animated-broccoli,CloudServer/nova,vladikr/nova_drafts,rahulunair/nova,affo/nova,luogangyi/bcec-nova,TwinkleChawla/nova,hanlind/nova,yatinkumbhare/openstack-nova,rrader/nova-docker-plugin,JioCloud/nova,whitepages/nova,jianghuaw/nova,iuliat/nova,takeshineshiro/nova,jianghuaw/nova,affo/nova,mandeepdhami/nova,yosshy/nova,yatinkumbhare/openstack-nova,akash1808/nova_test_latest,orbitfp7/nova,mmnelemane/nova,silenceli/nova,adelina-t/nova,mikalstill/nova,maelnor/nova,Tehsmash/nova,JioCloud/nova_test_latest,mandeepdhami/nova,hanlind/nova,JioCloud/nova_test_latest,alexandrucoman/vbox-nova-driver,CiscoSystems/nova,TwinkleChawla/nova,rajalokan/nova,barnsnake351/nova,varunarya10/nova_test_latest,tealover/nova,dims/nova,sebrandon1/nova,raildo/nova,rahulunair/nova,berrange/nova,klmitch/nova,angdraug/nova,tealover/nova,thomasem/nova,blueboxgroup/nova,felixma/nova,nikesh-mahalka/nova,maelnor/nova,projectcalico/calico-nova,luogangyi/bcec-nova,CCI-MOC/nova,tianweizhang/nova,CloudServer/nova,Juniper/nova,NeCTAR-RC/nova,JioCloud/nova,rahulunair/nova,watonyweng/nova,belmiromoreira/nova,openstack/nova,hanlind/nova,iuliat/nova,gooddata/openstack-nova,cloudbase/nova,tangfeixiong/nova,CiscoSystems/nova,sebrandon1/nova,JianyuWang/nova,apporc/nova,rrader/nova-docker-plugin,eayunstack/nova,eayunstack/nova,shail2810/nova,klmitch/nova,fnordahl/nova,shail2810/nova,petrutlucian94/nova,zhimin711/nova,vladikr/nova_drafts,isyippee/nova,badock/nova,petrutlucian94/nova,openstack/nova,jeffrey4l/nova,eonpatapon/nova,tudorvio/nova,alvarolopez/nova,bigswitch/nova,double12gzh/nova,CEG-FYP-OpenStack/scheduler,scripnichenko/nova,virtualopensystems/nova,shahar-stratoscale/nova,BeyondTheClouds/nova,leilihh/novaha,zaina/nova,Juniper/nova,jianghuaw/nova,vmturbo/nova,dawnpower/nova,mikalstill/nova,openstack/nova,ruslanloman/nova,jianghuaw/nova,tianweizhang/nova,spring-week-topos/nova-week,saleemjaveds/https-github.com-openstack-nova,CCI-MOC/nova,viggates/nova,gooddata/openstack-nova,alaski/nova,Stavitsky/nova,eonpatapon/nova,Metaswitch/calico-nova,devendermishrajio/nova_test_latest,mahak/nova,zhimin711/nova,dims/nova,varunarya10/nova_test_latest,akash1808/nova,alaski/nova,orbitfp7/nova,tanglei528/nova,raildo/nova,mahak/nova,saleemjaveds/https-github.com-openstack-nova,Francis-Liu/animated-broccoli,apporc/nova,fnordahl/nova,zaina/nova,dawnpower/nova,akash1808/nova,thomasem/nova,cloudbase/nova,MountainWei/nova,sebrandon1/nova,j-carpentier/nova,cloudbase/nova-virtualbox,cernops/nova,badock/nova,yosshy/nova,mgagne/nova,kimjaejoong/nova,projectcalico/calico-nova,double12gzh/nova,takeshineshiro/nova,zzicewind/nova,isyippee/nova,Tehsmash/nova,mikalstill/nova,Juniper/nova,Stavitsky/nova,gooddata/openstack-nova,rajalokan/nova,noironetworks/nova,edulramirez/nova,vmturbo/nova,adelina-t/nova,cyx1231st/nova,mgagne/nova,belmiromoreira/nova,klmitch/nova,LoHChina/nova,ruslanloman/nova,viggates/nova,blueboxgroup/nova,bigswitch/nova,leilihh/nova,barnsnake351/nova,Metaswitch/calico-nova,angdraug/nova,alvarolopez/nova,bgxavier/nova,LoHChina/nova,rajalokan/nova,j-carpentier/nova,cernops/nova,ewindisch/nov
a,zzicewind/nova,cloudbase/nova,BeyondTheClouds/nova,tangfeixiong/nova,MountainWei/nova,noironetworks/nova,ewindisch/nova,akash1808/nova_test_latest,spring-week-topos/nova-week,shahar-stratoscale/nova,JianyuWang/nova,devendermishrajio/nova_test_latest,cloudbase/nova-virtualbox,rajalokan/nova,Yusuke1987/openstack_template,vmturbo/nova,eharney/nova,leilihh/novaha,devendermishrajio/nova,vmturbo/nova,eharney/nova,virtualopensystems/nova,tanglei528/nova,cyx1231st/nova,silenceli/nova,cernops/nova,mmnelemane/nova,gooddata/openstack-nova,mahak/nova,CEG-FYP-OpenStack/scheduler,phenoxim/nova
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
Add super call to db Base class
Without this call, multiple inheritance involving the db Base
class does not work correctly.
Change-Id: Iac6b99d34f00babb8b66fede4977bf75f0ed61d4
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
super(Base, self).__init__()
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
|
<commit_before># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
<commit_msg>Add super call to db Base class
Without this call, multiple inheritance involving the db Base
class does not work correctly.
Change-Id: Iac6b99d34f00babb8b66fede4977bf75f0ed61d4<commit_after>
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
super(Base, self).__init__()
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
Add super call to db Base class
Without this call, multiple inheritance involving the db Base
class does not work correctly.
Change-Id: Iac6b99d34f00babb8b66fede4977bf75f0ed61d4# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
super(Base, self).__init__()
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
|
<commit_before># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
<commit_msg>Add super call to db Base class
Without this call, multiple inheritance involving the db Base
class does not work correctly.
Change-Id: Iac6b99d34f00babb8b66fede4977bf75f0ed61d4<commit_after># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
"""DB driver is injected in the init method."""
def __init__(self, db_driver=None):
super(Base, self).__init__()
if not db_driver:
db_driver = CONF.db_driver
self.db = importutils.import_module(db_driver) # pylint: disable=C0103
|
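The added line is load-bearing under multiple inheritance: without a cooperative super().__init__(), Python's MRO walk stops at Base and any sibling class's initializer is silently skipped. A toy reproduction of the failure mode (class names invented for the demo, using the same Python 2-style super as the nova code):

class Base(object):
    def __init__(self, db_driver=None):
        super(Base, self).__init__()   # keeps the MRO chain going
        self.db = db_driver or 'nova.db'

class StateMixin(object):
    def __init__(self):
        super(StateMixin, self).__init__()
        self.ready = True

class Service(Base, StateMixin):
    pass

s = Service()
print(s.db, s.ready)   # prints 'nova.db True'; delete the super() call in Base
                       # and StateMixin.__init__ never runs, so s.ready raises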
ae855bd4ba0da5667e6ba43b59d457e62e4c0d99
|
tests/util.py
|
tests/util.py
|
from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
return test_decorated
|
from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
test_decorated.__name__ = fn.__name__
return test_decorated
|
Make must_fail fully compatible with nose
|
[r] Make must_fail fully compatible with nose
Originally, whenever you run a must_fail test alone and directly
with nose, you would get this error message:
'ValueError: no such test method in <test reference>: test_decorated'
|
Python
|
mit
|
vesln/robber.py
|
from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
return test_decorated
[r] Make must_fail fully compatible with nose
Originally, whenever you run a must_fail test alone and directly
with nose, you would get this error message:
'ValueError: no such test method in <test reference>: test_decorated'
|
from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
test_decorated.__name__ = fn.__name__
return test_decorated
|
<commit_before>from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
return test_decorated
<commit_msg>[r] Make must_fail fully compatible with nose
Originally, whenever you run a must_fail test alone and directly
with nose, you would get this error message:
'ValueError: no such test method in <test reference>: test_decorated'<commit_after>
|
from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
test_decorated.__name__ = fn.__name__
return test_decorated
|
from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
return test_decorated
[r] Make must_fail fully compatible with nose
Originally, whenever you run a must_fail test alone and directly
with nose, you would get this error message:
'ValueError: no such test method in <test reference>: test_decorated'from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
test_decorated.__name__ = fn.__name__
return test_decorated
|
<commit_before>from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
return test_decorated
<commit_msg>[r] Make must_fail fully compatible with nose
Originally, whenever you run a must_fail test alone and directly
with nose, you would get this error message:
'ValueError: no such test method in <test reference>: test_decorated'<commit_after>from robber import BadExpectation
from robber.matchers.base import Base
expectation_count = 0
fail_count = 0
old_match = Base.match
def new_match(self):
global expectation_count
expectation_count += 1
try:
old_match(self)
except BadExpectation:
global fail_count
fail_count += 1
def reset():
global expectation_count
global fail_count
Base.match = old_match
expectation_count = 0
fail_count = 0
def must_fail(fn):
"""
This checks if every expectation in a test fails.
"""
def test_decorated(self, *args, **kwargs):
Base.match = new_match
fn(self, *args, **kwargs)
message = 'The test has {expectation_count} expectations, only {fail_count} fails.'.format(
expectation_count=expectation_count, fail_count=fail_count
)
if fail_count < expectation_count:
reset()
raise BadExpectation(message)
reset()
test_decorated.__name__ = fn.__name__
return test_decorated
|
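A note on the nose fix above: nose discovers test methods by __name__, so the bare wrapper was always registered as test_decorated and could not be run by its real name. Copying fn.__name__ restores discovery; a minimal sketch of the same idea using functools.wraps (an alternative the commit does not use, shown here as an assumption), which also carries over the docstring:
import functools
def must_fail(fn):
    @functools.wraps(fn)  # copies __name__, __doc__, etc. onto the wrapper
    def test_decorated(self, *args, **kwargs):
        fn(self, *args, **kwargs)  # the expectation-counting logic is elided for brevity
    return test_decorated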
f1aabcad9e6f6daae23c158c2fba7b28f0e57416
|
message_view.py
|
message_view.py
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
assert panel
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
Fix mypy error by asserting
|
Fix mypy error by asserting
Since we just asked `is_panel_active`, the following `find_output_panel`
*must* succeed. So we `assert panel` to tell mypy.
|
Python
|
mit
|
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
Fix mypy error by asserting
Since we just asked `is_panel_active`, the following `find_output_panel`
*must* succeed. So we `assert panel` to tell mypy.
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
assert panel
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
<commit_before>import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
<commit_msg>Fix mypy error by asserting
Since we just asked `is_panel_active`, the following `find_output_panel`
*must* succeed. So we `assert panel` to tell mypy.<commit_after>
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
assert panel
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
Fix mypy error by asserting
Since we just asked `is_panel_active`, the following `find_output_panel`
*must* succeed. So we `assert panel` to tell mypy.import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
assert panel
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
<commit_before>import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
<commit_msg>Fix mypy error by asserting
Since we just asked `is_panel_active`, the following `find_output_panel`
*must* succeed. So we `assert panel` to tell mypy.<commit_after>import sublime
import sublime_plugin
PANEL_NAME = "SublimeLinter Messages"
OUTPUT_PANEL = "output." + PANEL_NAME
def plugin_unloaded():
for window in sublime.windows():
window.destroy_output_panel(PANEL_NAME)
class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand):
def run(self, msg=""):
window = self.window
if is_panel_active(window):
panel = window.find_output_panel(PANEL_NAME)
assert panel
else:
panel = window.create_output_panel(PANEL_NAME)
syntax_path = "Packages/SublimeLinter/panel/message_view.sublime-syntax"
try: # Try the resource first, in case we're in the middle of an upgrade
sublime.load_resource(syntax_path)
except Exception:
return
panel.assign_syntax(syntax_path)
scroll_to = panel.size()
msg = msg.rstrip() + '\n\n\n'
panel.set_read_only(False)
panel.run_command('append', {'characters': msg})
panel.set_read_only(True)
panel.show(scroll_to)
window.run_command("show_panel", {"panel": OUTPUT_PANEL})
class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand):
def run(self):
self.window.destroy_output_panel(PANEL_NAME)
def is_panel_active(window):
return window.active_panel() == OUTPUT_PANEL
|
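Why the assert satisfies mypy: find_output_panel is typed to return an Optional value, and mypy narrows Optionals on assertions, so everything after assert panel is checked with the None branch removed. A minimal, self-contained sketch of the same narrowing pattern (the function below is illustrative, not the Sublime API):
from typing import Optional
def find_panel(name: str) -> Optional[str]:
    panels = {'output.SublimeLinter Messages': 'panel-handle'}
    return panels.get(name)  # may be None
panel = find_panel('output.SublimeLinter Messages')
assert panel  # mypy narrows Optional[str] to str past this line
print(panel.upper())  # no 'None has no attribute' complaint from mypy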
97506f25671c534eb643cf29f100e5f038d3fd64
|
EiCGraphAlgo/sindice/typeahead.py
|
EiCGraphAlgo/sindice/typeahead.py
|
'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['type']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))
|
'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['category']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))
|
Return empty list if nothing found
|
Return empty list if nothing found
|
Python
|
agpl-3.0
|
mmlab/eice,mmlab/eice,mmlab/eice,mmlab/eice
|
'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['type']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))Return empty list if nothing found
|
'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['category']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))
|
<commit_before>'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['type']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))<commit_msg>Return empty list if nothing found<commit_after>
|
'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['category']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))
|
'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['type']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))Return empty list if nothing found'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['category']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))
|
<commit_before>'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['type']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))<commit_msg>Return empty list if nothing found<commit_after>'''
Created on 17-sep.-2012
@author: ldevocht
'''
import urllib.request
import urllib.parse
import lxml.objectify
import logging
logger = logging.getLogger('pathFinder')
def dbPediaPrefix(prefix):
gateway = 'http://lookup.dbpedia.org/api/search.asmx/PrefixSearch?QueryString={0}'.format(prefix)
request = urllib.parse.quote(gateway, ':/=?<>"*&')
logger.debug('Request %s' % request)
raw_output = urllib.request.urlopen(request).read()
root = lxml.objectify.fromstring(raw_output)
results = list()
for result in root.Result:
if hasattr(result.Classes, 'Class'):
klasses = result.Classes.Class
if hasattr(klasses, 'Label'):
klasse = klasses
else:
klasse = klasses[0]
item = dict()
item['label'] = result.Label[0].text
item['category']=klasse.Label.text
item['value']=result.URI[0].text
results.append(item)
return results
print (dbPediaPrefix('Lon'))
|
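One gap in the lookup above: when DBpedia returns no <Result> elements, lxml.objectify raises AttributeError on root.Result before the loop runs, so the empty list promised by the commit subject is never reached. A hedged sketch of a guard (the hasattr check is an assumption, not part of the committed code):
import lxml.objectify
def prefix_results(raw_output):
    # Parse a PrefixSearch response, returning [] when nothing matched.
    root = lxml.objectify.fromstring(raw_output)
    results = []
    if not hasattr(root, 'Result'):  # no <Result> children at all
        return results
    for result in root.Result:
        results.append(result.Label[0].text)
    return results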
c8a7b9acc6c66a44eeb9ceac91587bb8ad08ad89
|
pagedown/utils.py
|
pagedown/utils.py
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
Use `django.templatetags.static` to load the file
|
Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25
|
Python
|
bsd-3-clause
|
timmyomahony/django-pagedown,timmyomahony/django-pagedown,timmyomahony/django-pagedown
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
<commit_before>from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)<commit_msg>Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25<commit_after>
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
<commit_before>from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)<commit_msg>Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25<commit_after>from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
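For context on the >= 1.4 branch: the static template helper resolves paths through the configured staticfiles storage (hashed filenames, CDN prefixes) instead of naively joining strings. A rough usage sketch, assuming STATIC_URL = '/static/' and an illustrative asset name:
from pagedown.utils import compatible_staticpath
url = compatible_staticpath('pagedown/demo.css')
# with STATIC_URL = '/static/' this yields '/static/pagedown/demo.css'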
6dfc0d6cec2d0bd9d873f4e0854cee46414c37ec
|
marked_tests.py
|
marked_tests.py
|
import unittest
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
import marked
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
Add import for marked to tests
|
Add import for marked to tests
|
Python
|
bsd-3-clause
|
1stvamp/marked
|
import unittest
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
Add import for marked to tests
|
import unittest
import marked
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add import for marked to tests<commit_after>
|
import unittest
import marked
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
Add import for marked to testsimport unittest
import marked
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add import for marked to tests<commit_after>import unittest
import marked
class MarkedTests(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
3d06be213f7dc2c98cf77ddd497a603af2671e53
|
nbt/__init__.py
|
nbt/__init__.py
|
__all__ = ["chunk", "region", "world", "nbt"]
from . import *
VERSION = (1, 3)
def _get_version():
return ".".join(VERSION)
|
__all__ = ["nbt", "world", "region", "chunk"]
from . import *
VERSION = (1, 3)
def _get_version():
"""Return the NBT version as string."""
return ".".join([str(v) for v in VERSION])
|
Fix nbt._get_version() Function did not work (it tried to join integers as a string) Also re-order __all__ for documentation purposes
|
Fix nbt._get_version()
Function did not work (it tried to join integers as a string)
Also re-order __all__ for documentation purposes
|
Python
|
mit
|
twoolie/NBT,cburschka/NBT,macfreek/NBT,devmario/NBT,fwaggle/NBT
|
__all__ = ["chunk", "region", "world", "nbt"]
from . import *
VERSION = (1, 3)
def _get_version():
return ".".join(VERSION)
Fix nbt._get_version()
Function did not work (it tried to join integers as a string)
Also re-order __all__ for documentation purposes
|
__all__ = ["nbt", "world", "region", "chunk"]
from . import *
VERSION = (1, 3)
def _get_version():
"""Return the NBT version as string."""
return ".".join([str(v) for v in VERSION])
|
<commit_before>__all__ = ["chunk", "region", "world", "nbt"]
from . import *
VERSION = (1, 3)
def _get_version():
return ".".join(VERSION)
<commit_msg>Fix nbt._get_version()
Function did not work (it tried to join integers as a string)
Also re-order __all__ for documentation purposes<commit_after>
|
__all__ = ["nbt", "world", "region", "chunk"]
from . import *
VERSION = (1, 3)
def _get_version():
"""Return the NBT version as string."""
return ".".join([str(v) for v in VERSION])
|
__all__ = ["chunk", "region", "world", "nbt"]
from . import *
VERSION = (1, 3)
def _get_version():
return ".".join(VERSION)
Fix nbt._get_version()
Function did not work (it tried to join integers as a string)
Also re-order __all__ for documentation purposes__all__ = ["nbt", "world", "region", "chunk"]
from . import *
VERSION = (1, 3)
def _get_version():
"""Return the NBT version as string."""
return ".".join([str(v) for v in VERSION])
|
<commit_before>__all__ = ["chunk", "region", "world", "nbt"]
from . import *
VERSION = (1, 3)
def _get_version():
return ".".join(VERSION)
<commit_msg>Fix nbt._get_version()
Function did not work (it tried to join integers as a string)
Also re-order __all__ for documentation purposes<commit_after>__all__ = ["nbt", "world", "region", "chunk"]
from . import *
VERSION = (1, 3)
def _get_version():
"""Return the NBT version as string."""
return ".".join([str(v) for v in VERSION])
|
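The original bug is easy to reproduce: str.join accepts only strings, so joining the (1, 3) version tuple directly raises TypeError. A two-line demonstration of why the str(v) conversion is needed:
VERSION = (1, 3)
# '.'.join(VERSION)  # TypeError: sequence item 0: expected str instance, int found
print('.'.join(str(v) for v in VERSION))  # prints 1.3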
2f2ae3308256d2233e0363cb46ee88067da54b4b
|
modules/roles.py
|
modules/roles.py
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
Add role removal and logic cleanup
|
Add role removal and logic cleanup
|
Python
|
mit
|
suclearnub/scubot
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
Add role removal and logic cleanup
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
<commit_before>import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
<commit_msg>Add role removal and logic cleanup<commit_after>
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
Add role removal and logic cleanupimport discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
<commit_before>import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
if len(msg) != 1:
try:
await client.add_roles(message.author,message.server.roles[role[0]])
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
client.send_message(message.channel, msg)
else:
pass
<commit_msg>Add role removal and logic cleanup<commit_after>import discord
import shlex
rolesTriggerString = '!role' # String to listen for as trigger
async def parse_roles_command(message, client):
server_roles = message.server.roles # Grab a list of all roles as Role objects
server_roles_str = [x.name for x in server_roles] # String-ify it into their names
msg = shlex.split(message.content)
role = [i for i,x in enumerate(server_roles_str) if x == msg[1]] # Check where in the list the role is
role_to_assign = message.server.roles[role[0]]
if len(msg) != 1:
try:
if role_to_assign in message.author.roles:
await client.remove_roles(message.author,role_to_assign)
msg = ":ok_hand: Removed you from " + role_to_assign + " ."
else:
await client.add_roles(message.author,role_to_assign)
msg = ":ok_hand: Added you to " + role_to_assign + " ."
except discord.DiscordException:
msg = "I'm sorry " + message.author.name + " ,I'm afraid I can't do that."
await client.send_message(message.channel, msg)
else:
pass
|
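A caveat with the new_contents above: role_to_assign is a discord Role object, so building msg with + would raise TypeError at runtime; the confirmation text needs the role's name. A hedged sketch of just that corner (the helper name is invented; only the string building differs from the commit):
async def toggle_role(client, message, role_to_assign):
    # Add or remove a role, reporting the change by role name.
    if role_to_assign in message.author.roles:
        await client.remove_roles(message.author, role_to_assign)
        return ":ok_hand: Removed you from " + role_to_assign.name + "."
    await client.add_roles(message.author, role_to_assign)
    return ":ok_hand: Added you to " + role_to_assign.name + "."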
0602aad845f0a04cdc535b97b4860469f600d9b0
|
django_tablib/datasets.py
|
django_tablib/datasets.py
|
from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif type(headers) is dict:
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif type(headers) is list:
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
|
from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif isinstance(headers, dict):
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif isinstance(headers, list):
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
|
Use isinstance to check type
|
Use isinstance to check type
This should also allow using subtypes like a SortedDict
to pass in headers.
|
Python
|
mit
|
joshourisman/django-tablib,joshourisman/django-tablib,ebrelsford/django-tablib,ebrelsford/django-tablib
|
from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif type(headers) is dict:
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif type(headers) is list:
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
Use isinstance to check type
This should also allow using subtypes like a SortedDict
to pass in headers.
|
from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif isinstance(headers, dict):
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif isinstance(headers, list):
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
|
<commit_before>from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif type(headers) is dict:
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif type(headers) is list:
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
<commit_msg>Use isinstance to check type
This should also allow using subtypes like a SortedDict
to pass in headers.<commit_after>
|
from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif isinstance(headers, dict):
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif isinstance(headers, list):
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
|
from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif type(headers) is dict:
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif type(headers) is list:
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
Use isinstance to check type
This should also allow using subtypes like a SortedDict
to pass in headers.from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif isinstance(headers, dict):
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif isinstance(headers, list):
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
|
<commit_before>from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif type(headers) is dict:
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif type(headers) is list:
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
<commit_msg>Use isinstance to check type
This should also allow using subtypes like a SortedDict
to pass in headers.<commit_after>from __future__ import absolute_import
from .base import BaseDataset
class SimpleDataset(BaseDataset):
def __init__(self, queryset, headers=None):
self.queryset = queryset
if headers is None:
fields = queryset.model._meta.fields
self.header_list = [field.name for field in fields]
self.attr_list = self.header_list
elif isinstance(headers, dict):
self.header_dict = headers
self.header_list = self.header_dict.keys()
self.attr_list = self.header_dict.values()
elif isinstance(headers, list):
self.header_list = headers
self.attr_list = headers
super(SimpleDataset, self).__init__()
|
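The SortedDict remark is the crux: type(x) is dict is False for every dict subclass, while isinstance accepts them, so ordered headers survive the check. A quick illustration with collections.OrderedDict standing in for Django's SortedDict:
from collections import OrderedDict
headers = OrderedDict([('Name', 'name'), ('Email', 'email')])
print(type(headers) is dict)      # False: exact-type check rejects subclasses
print(isinstance(headers, dict))  # True: subclasses pass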
bd7cf1f28d20fff3434325f0281d5a5a528983f8
|
mangopaysdk/entities/card.py
|
mangopaysdk/entities/card.py
|
from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
|
from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
self.UserId = None
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Country = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
|
Add extra missing items to the entity
|
Add extra missing items to the entity
|
Python
|
mit
|
chocopoche/mangopay2-python-sdk,Mangopay/mangopay2-python-sdk
|
from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
Add extra missing items to the entity
|
from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
self.UserId = None
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Country = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
|
<commit_before>from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
<commit_msg>Add extra missing items to the entity<commit_after>
|
from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
self.UserId = None
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Country = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
|
from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
Add extra missing items to the entityfrom mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
self.UserId = None
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Country = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
|
<commit_before>from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
<commit_msg>Add extra missing items to the entity<commit_after>from mangopaysdk.entities.entitybase import EntityBase
class Card(EntityBase):
"""Card entity"""
def __init__(self, id = None):
self.UserId = None
# MMYY
self.ExpirationDate = None
# first 6 and last 4 are real card numbers for example: 497010XXXXXX4414
self.Alias = None
# The card provider, it could be CB, VISA, MASTERCARD, etc.
self.CardProvider = None
# CardType enum
self.CardType = None
self.Country = None
self.Product = None
self.BankCode = None
# Boolean
self.Active = None
self.Currency = None
# UNKNOWN, VALID, INVALID
self.Validity = None
return super(Card, self).__init__(id)
|
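A short usage sketch of the widened entity, populating the two new attributes (the values are illustrative, not real MANGOPAY data):
from mangopaysdk.entities.card import Card
card = Card()
card.UserId = '8494514'           # new field: owner of the card
card.Country = 'FR'               # new field: issuing country
card.Alias = '497010XXXXXX4414'
card.Validity = 'VALID'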
a8c7067c3e3eb8931c9bdeb7a4a3e445ee6e338d
|
djasana/tests/test_connect.py
|
djasana/tests/test_connect.py
|
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
client_connect()
|
import requests
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
try:
client_connect()
except requests.exceptions.ConnectionError:
self.skipTest('No Internet connection')
|
Fix test for no connection - In the case of no connectivity, test will skip rather than fail
|
Fix test for no connection
- In the case of no connectivity, test will skip rather than fail
|
Python
|
mit
|
sbywater/django-asana
|
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
client_connect()
Fix test for no connection
- In the case of no connectivity, test will skip rather than fail
|
import requests
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
try:
client_connect()
except requests.exceptions.ConnectionError:
self.skipTest('No Internet connection')
|
<commit_before>import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
client_connect()
<commit_msg>Fix test for no connection
- In the case of no connectivity, test will skip rather than fail<commit_after>
|
import requests
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
try:
client_connect()
except requests.exceptions.ConnectionError:
self.skipTest('No Internet connection')
|
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
client_connect()
Fix test for no connection
- In the case of no connectivity, test will skip rather than fail
import requests
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
try:
client_connect()
except requests.exceptions.ConnectionError:
self.skipTest('No Internet connection')
|
<commit_before>import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
client_connect()
<commit_msg>Fix test for no connection
- In the case of no connectivity, test will skip rather than fail<commit_after>import requests
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.test import override_settings
from asana.error import NoAuthorizationError
from djasana.connect import client_connect
class ClientConnectTestCase(unittest.TestCase):
@override_settings(ASANA_ACCESS_TOKEN=None, ASANA_CLIENT_ID=None)
def test_settings_required(self):
with self.assertRaises(ImproperlyConfigured):
client_connect()
@override_settings(ASANA_ACCESS_TOKEN='foo')
def test_connect_access_token(self):
with self.assertRaises(NoAuthorizationError):
try:
client_connect()
except requests.exceptions.ConnectionError:
self.skipTest('No Internet connection')
|
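The skip-on-ConnectionError pattern in the commit above generalizes to any test that needs network access. A self-contained sketch under that assumption; the class name, test name, and URL are hypothetical.

import unittest

import requests


class NetworkTestCase(unittest.TestCase):
    def test_requires_internet(self):
        # Skip rather than fail when the machine is offline.
        try:
            requests.get('https://example.com', timeout=5)
        except requests.exceptions.ConnectionError:
            self.skipTest('No Internet connection')


if __name__ == '__main__':
    unittest.main()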
06a052c7f60fd413f39b8e313e44bfeea970896a
|
work/admin.py
|
work/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
TEST allink_apps subtree - pulling
|
TEST allink_apps subtree - pulling
|
Python
|
bsd-3-clause
|
allink/allink-apps,allink/allink-apps
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
TEST allink_apps subtree - pulling
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
<commit_msg>TEST allink_apps subtree - pulling<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
TEST allink_apps subtree - pulling
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
<commit_msg>TEST allink_apps subtree - pulling<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights
class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
model = Highlights
extra = 1
max_num = 10
verbose_name = _(u'Highlight')
verbose_name_plural = _(u'Highlights')
@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
inlines = [HighlightsInline, ]
list_filter = ('is_active', 'categories',)
def get_fieldsets(self, request, obj=None):
fieldsets = (
(None, {
'fields': (
'is_active',
'title',
'slug',
'lead',
'preview_image',
),
}),
)
fieldsets += self.get_base_fieldsets()
return fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
if db_field.name == 'lead':
kwargs['widget'] = forms.Textarea
return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
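The formfield_for_dbfield override in WorkAdmin is a standard Django technique for swapping the widget of a single field. A stripped-down sketch of the same idea; the admin class is hypothetical and not part of the commit above.

from django import forms
from django.contrib import admin


class LeadTextareaAdmin(admin.ModelAdmin):
    """Hypothetical admin rendering a 'lead' field as a textarea."""

    def formfield_for_dbfield(self, db_field, **kwargs):
        # Replace the default single-line input for 'lead' with a textarea.
        if db_field.name == 'lead':
            kwargs['widget'] = forms.Textarea
        return super(LeadTextareaAdmin, self).formfield_for_dbfield(
            db_field, **kwargs)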
c811e0e02d06f8d5fd6a0b738546b0e200c706cd
|
fairseq/criterions/fairseq_criterion.py
|
fairseq/criterions/fairseq_criterion.py
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.task = task
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
|
Store task in the criterion base class
|
Store task in the criterion base class
Summary: Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/737
Differential Revision: D16377805
Pulled By: myleott
fbshipit-source-id: 1e090a02ff4fbba8695173f57d3cc5b88ae98bbf
|
Python
|
mit
|
pytorch/fairseq,pytorch/fairseq,pytorch/fairseq
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
Store task in the criterion base class
Summary: Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/737
Differential Revision: D16377805
Pulled By: myleott
fbshipit-source-id: 1e090a02ff4fbba8695173f57d3cc5b88ae98bbf
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.task = task
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
|
<commit_before># Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
<commit_msg>Store task in the criterion base class
Summary: Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/737
Differential Revision: D16377805
Pulled By: myleott
fbshipit-source-id: 1e090a02ff4fbba8695173f57d3cc5b88ae98bbf<commit_after>
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.task = task
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
Store task in the criterion base class
Summary: Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/737
Differential Revision: D16377805
Pulled By: myleott
fbshipit-source-id: 1e090a02ff4fbba8695173f57d3cc5b88ae98bbf
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.task = task
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
|
<commit_before># Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
<commit_msg>Store task in the criterion base class
Summary: Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/737
Differential Revision: D16377805
Pulled By: myleott
fbshipit-source-id: 1e090a02ff4fbba8695173f57d3cc5b88ae98bbf<commit_after># Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from torch.nn.modules.loss import _Loss
class FairseqCriterion(_Loss):
def __init__(self, args, task):
super().__init__()
self.args = args
self.task = task
self.padding_idx = task.target_dictionary.pad() if task.target_dictionary is not None else -100
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
pass
@classmethod
def build_criterion(cls, args, task):
return cls(args, task)
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
raise NotImplementedError
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
raise NotImplementedError
@staticmethod
def grad_denom(sample_sizes):
"""Compute the gradient denominator for a set of sample sizes."""
return sum(sample_sizes)
|
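Keeping the task on the criterion means subclasses can reach task-level resources without re-deriving them. A hypothetical subclass sketch that assumes only the base class shown above.

from fairseq.criterions.fairseq_criterion import FairseqCriterion


class VocabAwareCriterion(FairseqCriterion):
    """Hypothetical criterion that reads state off the stored task."""

    def target_vocab_size(self):
        # self.task is available because __init__ now keeps a reference.
        return len(self.task.target_dictionary)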
0d6706383b6414459cf158b213f4102fa3452b5a
|
pmxbot/slack.py
|
pmxbot/slack.py
|
import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
Implement scheduled task handling in Slack
|
Implement scheduled task handling in Slack
|
Python
|
mit
|
yougov/pmxbot,yougov/pmxbot,yougov/pmxbot
|
import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
Implement scheduled task handling in Slack
|
import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
<commit_before>import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
<commit_msg>Implement scheduled task handling in Slack<commit_after>
|
import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
Implement scheduled task handling in Slack
import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
<commit_before>import time
import importlib
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.handle_scheduled_tasks()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def handle_scheduled_tasks(self):
"stubbed"
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
<commit_msg>Implement scheduled task handling in Slack<commit_after>import time
import importlib
from tempora import schedule
import pmxbot
class Bot(pmxbot.core.Bot):
def __init__(self, server, port, nickname, channels, password=None):
token = pmxbot.config['slack token']
sc = importlib.import_module('slackclient')
self.client = sc.SlackClient(token)
self.scheduler = schedule.CallbackScheduler(self.handle_scheduled)
def start(self):
res = self.client.rtm_connect()
assert res, "Error connecting"
self.init_schedule(self.scheduler)
while True:
for msg in self.client.rtm_read():
self.handle_message(msg)
self.scheduler.run_pending()
time.sleep(0.1)
def handle_message(self, msg):
if msg.get('type') != 'message':
return
channel = self.client.server.channels.find(msg['channel']).name
nick = self.client.server.users.find(msg['user']).name
self.handle_action(channel, nick, msg['text'])
def transmit(self, channel, message):
channel = self.client.server.channels.find(channel)
channel.send_message(message)
|
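A rough standalone sketch of the tempora scheduler wiring used in the commit above. DelayedCommand.after and its arguments are assumptions about tempora's public API, not something shown in this record.

import datetime
import time

from tempora import schedule


def dispatch(action):
    # Invoked by CallbackScheduler.run_pending() for each due command.
    print('running scheduled action:', action)


scheduler = schedule.CallbackScheduler(dispatch)
# Assumed API: schedule a one-shot command three seconds from now.
scheduler.add(
    schedule.DelayedCommand.after(datetime.timedelta(seconds=3), 'my-action'))

for _ in range(50):  # brief polling loop, mirroring the bot's rtm loop
    scheduler.run_pending()
    time.sleep(0.1)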
938725a3693ee885a761e5ba07e75d2b94d78661
|
pytask/profile/urls.py
|
pytask/profile/urls.py
|
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
|
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
|
Fix styling issue in URLConf.
|
Fix styling issue in URLConf.
|
Python
|
agpl-3.0
|
madhusudancs/pytask,madhusudancs/pytask,madhusudancs/pytask
|
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
Fix styling issue in URLConf.
|
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
|
<commit_before>from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
<commit_msg>Fix styling issue in URLConf.<commit_after>
|
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
|
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
Fix styling issue in URLConf.
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
|
<commit_before>from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notf/browse/$', 'browse_notifications',
name='edit_profile'),
url(r'^notf/view/(?P<notification_id>\d+)$', 'view_notification',
name='view_notification'),
url(r'^notf/del/(?P<notification_id>\d+)$', 'delete_notification',
name='delete_notification'),
url(r'^notf/unr/(?P<notification_id>\d+)$', 'unread_notification',
name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
<commit_msg>Fix styling issue in URLConf.<commit_after>from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
urlpatterns = patterns('pytask.profile.views',
url(r'^view/$', 'view_profile', name='view_profile'),
url(r'^edit/$', 'edit_profile', name='edit_profile'),
url(r'^notification/browse/$', 'browse_notifications',
name='browse_notifications'),
url(r'^notification/view/(?P<notification_id>\d+)$',
'view_notification', name='view_notification'),
url(r'^notification/delete/(?P<notification_id>\d+)$',
'delete_notification', name='delete_notification'),
url(r'^notification/unread/(?P<notification_id>\d+)$',
'unread_notification', name='unread_notification'),
url(r'^user/view/(?P<user_id>\d+)$', 'view_user',
name='view_user'),
)
|
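The old URLConf reused name='edit_profile' for the notification-browse route, so name-based lookups would resolve to the wrong pattern; with distinct names, reverse() behaves as expected. A sketch that assumes it runs inside a configured Django project of the same era (django.conf.urls.defaults implies Django <= 1.3).

from django.core.urlresolvers import reverse

# Each route can now be looked up unambiguously by its own name; the exact
# paths depend on where this URLConf is mounted.
print(reverse('browse_notifications'))
print(reverse('view_notification', kwargs={'notification_id': 42}))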
f415a411f748ce5a8eb142d862970e00d0267004
|
tests/test_environment.py
|
tests/test_environment.py
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
Add a simple test to make sure cookiecutter comes with jinja2_time
|
Add a simple test to make sure cookiecutter comes with jinja2_time
|
Python
|
bsd-3-clause
|
Springerle/cookiecutter,dajose/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter,stevepiercy/cookiecutter
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
Add a simple test to make sure cookiecutter comes with jinja2_time
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
<commit_before># -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
<commit_msg>Add a simple test to make sure cookiecutter comes with jinja2_time<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
Add a simple test to make sure cookiecutter comes with jinja2_time
# -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
<commit_before># -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
<commit_msg>Add a simple test to make sure cookiecutter comes with jinja2_time<commit_after># -*- coding: utf-8 -*-
import pytest
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UnknownExtension
def test_env_should_raise_for_unknown_extension():
context = {
'cookiecutter': {
'_extensions': ['foobar']
}
}
with pytest.raises(UnknownExtension) as err:
StrictEnvironment(context=context, keep_trailing_newline=True)
assert 'Unable to load extension: ' in str(err.value)
def test_env_should_come_with_jinja2_time_extension():
env = StrictEnvironment(keep_trailing_newline=True)
assert 'jinja2_time.jinja2_time.TimeExtension' in env.extensions
|
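Because the TimeExtension now ships with every StrictEnvironment, templates can use jinja2-time's now tag without declaring _extensions. A short sketch using the constructor call shown in the test plus jinja2-time's documented tag syntax.

from cookiecutter.environment import StrictEnvironment

env = StrictEnvironment(keep_trailing_newline=True)
# jinja2-time tag: {% now 'utc', '<strftime format>' %}
template = env.from_string("Generated on {% now 'utc', '%Y-%m-%d' %}")
print(template.render())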
271ec40107411f6b7f8b6127adb06039228a9390
|
reports/serializers.py
|
reports/serializers.py
|
from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(e.message)
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
|
from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(str(e))
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
|
Fix error handing to be python 3 compatible
|
Fix error handing to be python 3 compatible
|
Python
|
bsd-3-clause
|
praekelt/hellomama-registration,praekelt/hellomama-registration
|
from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(e.message)
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
Fix error handing to be python 3 compatible
|
from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(str(e))
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
|
<commit_before>from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(e.message)
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
<commit_msg>Fix error handing to be python 3 compatible<commit_after>
|
from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(str(e))
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
|
from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(e.message)
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
Fix error handing to be python 3 compatiblefrom django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(str(e))
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
|
<commit_before>from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(e.message)
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
<commit_msg>Fix error handing to be python 3 compatible<commit_after>from django.utils import timezone
from rest_framework import serializers
from reports.utils import midnight, midnight_validator, one_month_after
class ReportGenerationSerializer(serializers.Serializer):
output_file = serializers.CharField()
start_date = serializers.CharField(allow_blank=True, required=False)
end_date = serializers.CharField(allow_blank=True, required=False)
email_to = serializers.ListField(
child=serializers.EmailField(), required=False)
email_from = serializers.EmailField(allow_blank=True, required=False)
email_subject = serializers.CharField(allow_blank=True, required=False)
def validate(self, data):
if 'start_date' not in data:
data['start_date'] = midnight(timezone.now())
if 'end_date' not in data:
data['end_date'] = one_month_after(data['start_date'])
return data
def validate_date(self, value):
try:
date = midnight_validator(value)
except ValueError as e:
raise serializers.ValidationError(str(e))
return date
def validate_start_date(self, value):
return self.validate_date(value)
def validate_end_date(self, value):
return self.validate_date(value)
|
54674b79fecf7eec4f09e885e7d68c9b6181efcf
|
mollie/api/objects/base.py
|
mollie/api/objects/base.py
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
Make _get_property to return none when name does not exist
|
Make _get_property to return none when name does not exist
|
Python
|
bsd-2-clause
|
mollie/mollie-api-python
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
Make _get_property to return none when name does not exist
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
<commit_before>class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
<commit_msg>Make _get_property to return none when name does not exist<commit_after>
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
Make _get_property to return none when name does not existclass Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
<commit_before>class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if not self[name]:
return None
return self[name]
<commit_msg>Make _get_property to return none when name does not exist<commit_after>class Base(dict):
def _get_property(self, name):
"""Return the named property from dictionary values."""
if name not in self:
return None
return self[name]
|
13c3379717d1ad10a179f26838950090a2b6e4f4
|
pyxb/__init__.py
|
pyxb/__init__.py
|
"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
pass
#def __new__ (cls, *args, **kw):
# return object.__new__(cls)
from exceptions_ import *
|
"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
# Oh gross. If this class descends from unicode or string, we
# get here when object is *not* our direct superclass. In
# that case, we have to pass the arguments on up, or the
# strings don't get created right. Below is the only way I've
# figured out to detect the situation.
if self.__class__ != self.__class__.mro()[-1]:
super(cscRoot, self).__init__(*args, **kw)
from exceptions_ import *
|
Handle unicode and string creation correctly
|
Handle unicode and string creation correctly
|
Python
|
apache-2.0
|
jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,pabigot/pyxb,CantemoInternal/pyxb,jonfoster/pyxb2,balanced/PyXB,jonfoster/pyxb2,pabigot/pyxb,jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,balanced/PyXB,CantemoInternal/pyxb,balanced/PyXB
|
"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
pass
#def __new__ (cls, *args, **kw):
# return object.__new__(cls)
from exceptions_ import *
Handle unicode and string creation correctly
|
"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
# Oh gross. If this class descends from unicode or string, we
# get here when object is *not* our direct superclass. In
# that case, we have to pass the arguments on up, or the
# strings don't get created right. Below is the only way I've
# figured out to detect the situation.
if self.__class__ != self.__class__.mro()[-1]:
super(cscRoot, self).__init__(*args, **kw)
from exceptions_ import *
|
<commit_before>"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
pass
#def __new__ (cls, *args, **kw):
# return object.__new__(cls)
from exceptions_ import *
<commit_msg>Handle unicode and string creation correctly<commit_after>
|
"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
# Oh gross. If this class descends from unicode or string, we
# get here when object is *not* our direct superclass. In
# that case, we have to pass the arguments on up, or the
# strings don't get created right. Below is the only way I've
# figured out to detect the situation.
if self.__class__ != self.__class__.mro()[-1]:
super(cscRoot, self).__init__(*args, **kw)
from exceptions_ import *
|
"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
pass
#def __new__ (cls, *args, **kw):
# return object.__new__(cls)
from exceptions_ import *
Handle unicode and string creation correctly"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
# Oh gross. If this class descends from unicode or string, we
# get here when object is *not* our direct superclass. In
# that case, we have to pass the arguments on up, or the
# strings don't get created right. Below is the only way I've
# figured out to detect the situation.
if self.__class__ != self.__class__.mro()[-1]:
super(cscRoot, self).__init__(*args, **kw)
from exceptions_ import *
|
<commit_before>"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
pass
#def __new__ (cls, *args, **kw):
# return object.__new__(cls)
from exceptions_ import *
<commit_msg>Handle unicode and string creation correctly<commit_after>"""PyWXSB -- Python W3C XML Schema Bindings.
binding - Module used to generate the bindings and at runtime to
support the generated bindings
utils - Common utilities used in parsing, generating, and running
xmlschema - Class that convert XMLSchema from a DOM model to a Python
class model based on the XMLSchema components
"""
class cscRoot (object):
"""This little bundle of joy exists because in Python 2.6 it
became an error to invoke object.__init__ with parameters (unless
you also override __new__, in which case it's only a warning.
Whatever.). Since I'm bloody not going to check in every class
whether super(Myclass,self) refers to object (even if I could
figure out how to do that, 'cuz the obvious solutions don't work),
we'll just make this thing the root of all cooperative super
calling hierarchies."""
def __init__ (self, *args, **kw):
# Oh gross. If this class descends from unicode or string, we
# get here when object is *not* our direct superclass. In
# that case, we have to pass the arguments on up, or the
# strings don't get created right. Below is the only way I've
# figured out to detect the situation.
if self.__class__ != self.__class__.mro()[-1]:
super(cscRoot, self).__init__(*args, **kw)
from exceptions_ import *
|
f2d02748202571bdb3b993788ea218a1a522488d
|
python/setup.py
|
python/setup.py
|
from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.5',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
|
from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c', '../ci20.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.6',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
|
Fix issue with Python bindings caused by introduction of CI20 platform
|
Fix issue with Python bindings caused by introduction of CI20 platform
this adds ci20.c to the compile path for Python bindings, otherwise you
get the following:
ImportError:
/usr/lib/python3.4/site-packages/wiringX/gpio.cpython-34m.so: undefined
symbol: ci20Init
|
Python
|
mpl-2.0
|
mwischer/wiringX,mwischer/wiringX,bkrepo/wiringX,wiringX/wiringX,mxOBS/wiringX,mxOBS/wiringX,mwischer/wiringX,mxOBS/wiringX,bkrepo/wiringX,wiringX/wiringX,bkrepo/wiringX,wiringX/wiringX
|
from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.5',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
Fix issue with Python bindings caused by introduction of CI20 platform
this adds ci20.c to the compile path for Python bindings, otherwise you
get the following:
ImportError:
/usr/lib/python3.4/site-packages/wiringX/gpio.cpython-34m.so: undefined
symbol: ci20Init
|
from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c', '../ci20.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.6',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
|
<commit_before>from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.5',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
<commit_msg>Fix issue with Python bindings caused by introduction of CI20 platform
this adds ci20.c to the compile path for Python bindings, otherwise you
get the following:
ImportError:
/usr/lib/python3.4/site-packages/wiringX/gpio.cpython-34m.so: undefined
symbol: ci20Init<commit_after>
|
from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c', '../ci20.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.6',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
|
from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.5',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
Fix issue with Python bindings caused by introduction of CI20 platform
this adds ci20.c to the compile path for Python bindings, otherwise you
get the following:
ImportError:
/usr/lib/python3.4/site-packages/wiringX/gpio.cpython-34m.so: undefined
symbol: ci20Initfrom distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c', '../ci20.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.6',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
|
<commit_before>from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.5',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
<commit_msg>Fix issue with Python bindings caused by introduction of CI20 platform
this adds ci20.c to the compile path for Python bindings, otherwise you
get the following:
ImportError:
/usr/lib/python3.4/site-packages/wiringX/gpio.cpython-34m.so: undefined
symbol: ci20Init<commit_after>from distutils.core import setup
from distutils.core import Extension
from distutils.command.build_ext import build_ext as _build_ext
import sys
modules = [
Extension('wiringX.gpio', sources=['wiringX/wiringx.c', '../wiringX.c', '../hummingboard.c', '../bananapi.c', '../radxa.c', '../raspberrypi.c', '../ci20.c'], include_dirs=['../'], extra_compile_args=['-Wformat=0']),
]
setup(
name='wiringX',
version='0.6',
author='CurlyMo',
author_email='curlymoo1@gmail.com',
url='https://www.wiringx.org/',
license='GPLv2',
packages=['wiringX'],
description='Control GPIO and I2C',
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Home Automation',
'Topic :: Software Development :: Embedded Systems'
],
ext_modules=modules
)
|
2537cdf45650eb2d7d57d5e108a11658b4d64898
|
saleor/product/urls.py
|
saleor/product/urls.py
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[a-z0-9-]+?)/$', views.category_index,
name='category')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[\w-]+?)/$', views.category_index,
name='category')
]
|
Fix url pattern for category's slug
|
Fix url pattern for category's slug
|
Python
|
bsd-3-clause
|
arth-co/saleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,rchav/vinerack,paweltin/saleor,avorio/saleor,arth-co/saleor,tfroehlich82/saleor,dashmug/saleor,maferelo/saleor,avorio/saleor,laosunhust/saleor,itbabu/saleor,KenMutemi/saleor,car3oon/saleor,Drekscott/Motlaesaleor,maferelo/saleor,taedori81/saleor,Drekscott/Motlaesaleor,UITools/saleor,itbabu/saleor,dashmug/saleor,taedori81/saleor,avorio/saleor,paweltin/saleor,spartonia/saleor,rodrigozn/CW-Shop,UITools/saleor,Drekscott/Motlaesaleor,jreigel/saleor,KenMutemi/saleor,mociepka/saleor,rodrigozn/CW-Shop,UITools/saleor,HyperManTT/ECommerceSaleor,arth-co/saleor,josesanch/saleor,laosunhust/saleor,paweltin/saleor,mociepka/saleor,Drekscott/Motlaesaleor,KenMutemi/saleor,josesanch/saleor,rodrigozn/CW-Shop,taedori81/saleor,josesanch/saleor,UITools/saleor,paweltin/saleor,car3oon/saleor,UITools/saleor,tfroehlich82/saleor,rchav/vinerack,laosunhust/saleor,car3oon/saleor,maferelo/saleor,laosunhust/saleor,spartonia/saleor,jreigel/saleor,rchav/vinerack,tfroehlich82/saleor,avorio/saleor,spartonia/saleor,taedori81/saleor,spartonia/saleor,dashmug/saleor,mociepka/saleor,itbabu/saleor,jreigel/saleor,arth-co/saleor
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[a-z0-9-]+?)/$', views.category_index,
name='category')
]
Fix url pattern for category's slug
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[\w-]+?)/$', views.category_index,
name='category')
]
|
<commit_before>from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[a-z0-9-]+?)/$', views.category_index,
name='category')
]
<commit_msg>Fix url pattern for category's slug<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[\w-]+?)/$', views.category_index,
name='category')
]
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[a-z0-9-]+?)/$', views.category_index,
name='category')
]
Fix url pattern for category's slugfrom django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[\w-]+?)/$', views.category_index,
name='category')
]
|
<commit_before>from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[a-z0-9-]+?)/$', views.category_index,
name='category')
]
<commit_msg>Fix url pattern for category's slug<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[a-z0-9-]+?)-(?P<product_id>[0-9]+)/$',
views.product_details, name='details'),
url(r'^category/(?P<slug>[\w-]+?)/$', views.category_index,
name='category')
]
|
28f6be52b429eb999a70ff900f526142fc5f162c
|
tests/test_exec_mixin.py
|
tests/test_exec_mixin.py
|
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
|
from __future__ import with_statement
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
|
Fix Python 2.5 support in tests
|
Fix Python 2.5 support in tests
|
Python
|
isc
|
gears/gears,gears/gears,gears/gears
|
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
Fix Python 2.5 support in tests
|
from __future__ import with_statement
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
|
<commit_before>from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
<commit_msg>Fix Python 2.5 support in tests<commit_after>
|
from __future__ import with_statement
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
|
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
Fix Python 2.5 support in testsfrom __future__ import with_statement
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
|
<commit_before>from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
<commit_msg>Fix Python 2.5 support in tests<commit_after>from __future__ import with_statement
from gears.asset_handler import AssetHandlerError, ExecMixin
from mock import patch, Mock
from unittest2 import TestCase
class Exec(ExecMixin):
executable = 'program'
class ExecMixinTests(TestCase):
@patch('gears.asset_handler.Popen')
def test_returns_stdout_on_success(self, Popen):
result = Mock()
result.returncode = 0
result.communicate.return_value = ('output', '')
Popen.return_value = result
self.assertEqual(Exec().run('input'), 'output')
@patch('gears.asset_handler.Popen')
def test_raises_stderr_on_failure(self, Popen):
result = Mock()
result.returncode = 1
result.communicate.return_value = ('', 'error')
Popen.return_value = result
with self.assertRaises(AssetHandlerError):
Exec().run('input')
|
d83d2bb2ea9bc690a5b279a88fdc22fa23e6299a
|
tests/test_pagination.py
|
tests/test_pagination.py
|
import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
|
import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=1):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
|
Add wait for pagination test
|
Add wait for pagination test
|
Python
|
mit
|
ping/instagram_private_api_extensions
|
import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
Add wait for pagination test
|
import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=1):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
|
<commit_before>import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
<commit_msg>Add wait for pagination test<commit_after>
|
import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=1):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
|
import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
Add wait for pagination testimport unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=1):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
|
<commit_before>import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
<commit_msg>Add wait for pagination test<commit_after>import unittest
import sys
import os
try:
from instagram_private_api_extensions import pagination
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from instagram_private_api_extensions import pagination
class TestPagination(unittest.TestCase):
def test_page(self):
testset = ['a', 'b', 'c', 'd', 'e', 'f', 'h', 'i', 'j', 'k', 'l', 'm', 'n']
def paging_stub(start=0):
page_size = 3
result = {
'items': testset[start:start + page_size]
}
if len(testset) > start + page_size:
result['next_index'] = start + page_size
return result
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=0):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
resultset = []
for results in pagination.page(
paging_stub, args={},
cursor_key='start',
get_cursor=lambda r: r.get('next_index'),
wait=1):
if results.get('items'):
resultset.extend(results['items'])
self.assertEqual(testset, resultset)
|